From 00e63d71a97e0fb3a71dc4e94902a4379dc05da9 Mon Sep 17 00:00:00 2001
From: Philip Rebohle <philip.rebohle@tu-dortmund.de>
Date: Tue, 10 Oct 2017 23:32:13 +0200
Subject: [PATCH] Initial commit

---
 build-win64.txt                            |    16 +
 include/vulkan/vk_icd.h                    |   152 +
 include/vulkan/vk_layer.h                  |   143 +
 include/vulkan/vk_platform.h               |   120 +
 include/vulkan/vk_sdk_platform.h           |    46 +
 include/vulkan/vulkan.h                    |  6429 ++++
 include/vulkan/vulkan.hpp                  | 32807 +++++++++++++++++++
 lib/SDL2.lib                               |   Bin 0 -> 120720 bytes
 lib/vulkan-1.lib                           |   Bin 0 -> 38268 bytes
 meson.build                                |    11 +
 src/dxvk/dxvk_adapter.cpp                  |   177 +
 src/dxvk/dxvk_adapter.h                    |   143 +
 src/dxvk/dxvk_buffer.cpp                   |     0
 src/dxvk/dxvk_buffer.h                     |    38 +
 src/dxvk/dxvk_cmdlist.cpp                  |    68 +
 src/dxvk/dxvk_cmdlist.h                    |    70 +
 src/dxvk/dxvk_context.cpp                  |    73 +
 src/dxvk/dxvk_context.h                    |    66 +
 src/dxvk/dxvk_context_state.cpp            |     0
 src/dxvk/dxvk_context_state.h              |     9 +
 src/dxvk/dxvk_device.cpp                   |   119 +
 src/dxvk/dxvk_device.h                     |   152 +
 src/dxvk/dxvk_framebuffer.cpp              |    95 +
 src/dxvk/dxvk_framebuffer.h                |   160 +
 src/dxvk/dxvk_hash.h                       |    34 +
 src/dxvk/dxvk_image.cpp                    |    66 +
 src/dxvk/dxvk_image.h                      |   151 +
 src/dxvk/dxvk_include.h                    |    10 +
 src/dxvk/dxvk_instance.cpp                 |   108 +
 src/dxvk/dxvk_instance.h                   |    56 +
 src/dxvk/dxvk_lifetime.cpp                 |    21 +
 src/dxvk/dxvk_lifetime.h                   |    45 +
 src/dxvk/dxvk_main.cpp                     |    11 +
 src/dxvk/dxvk_main.h                       |    14 +
 src/dxvk/dxvk_memory.cpp                   |   112 +
 src/dxvk/dxvk_memory.h                     |   105 +
 src/dxvk/dxvk_renderpass.cpp               |   166 +
 src/dxvk/dxvk_renderpass.h                 |   175 +
 src/dxvk/dxvk_resource.cpp                 |     9 +
 src/dxvk/dxvk_resource.h                   |    33 +
 src/dxvk/dxvk_surface.cpp                  |   150 +
 src/dxvk/dxvk_surface.h                    |    98 +
 src/dxvk/dxvk_swapchain.cpp                |   185 +
 src/dxvk/dxvk_swapchain.h                  |    96 +
 src/dxvk/dxvk_sync.cpp                     |    57 +
 src/dxvk/dxvk_sync.h                       |    89 +
 src/dxvk/meson.build                       |    32 +
 src/dxvk/vulkan/dxvk_vulkan_extensions.cpp |    71 +
 src/dxvk/vulkan/dxvk_vulkan_extensions.h   |   103 +
 src/dxvk/vulkan/dxvk_vulkan_loader.cpp     |    43 +
 src/dxvk/vulkan/dxvk_vulkan_loader.h       |   267 +
 src/dxvk/vulkan/dxvk_vulkan_loader_fn.h    |    48 +
 src/meson.build                            |     2 +
 src/util/log/log.cpp                       |    26 +
 src/util/log/log.h                         |    40 +
 src/util/meson.build                       |     6 +
 src/util/rc/util_rc.h                      |    36 +
 src/util/rc/util_rc_ptr.h                  |   126 +
 src/util/util_error.h                      |    31 +
 src/util/util_math.h                       |    12 +
 src/util/util_string.h                     |    23 +
 tests/dxvk/meson.build                     |     3 +
 tests/dxvk/test_dxvk_triangle.cpp          |   128 +
 tests/meson.build                          |     1 +
 64 files changed, 43683 insertions(+)
 create mode 100644 build-win64.txt
 create mode 100644 include/vulkan/vk_icd.h
 create mode 100644 include/vulkan/vk_layer.h
 create mode 100644 include/vulkan/vk_platform.h
 create mode 100644 include/vulkan/vk_sdk_platform.h
 create mode 100644 include/vulkan/vulkan.h
 create mode 100644 include/vulkan/vulkan.hpp
 create mode 100755 lib/SDL2.lib
 create mode 100755 lib/vulkan-1.lib
 create mode 100644 meson.build
 create mode 100644 src/dxvk/dxvk_adapter.cpp
 create mode 100644 src/dxvk/dxvk_adapter.h
 create mode 100644 src/dxvk/dxvk_buffer.cpp
 create mode 100644 src/dxvk/dxvk_buffer.h
 create mode 100644 src/dxvk/dxvk_cmdlist.cpp
 create mode 100644 src/dxvk/dxvk_cmdlist.h
 create mode 100644 src/dxvk/dxvk_context.cpp
 create mode 100644 src/dxvk/dxvk_context.h
 create mode 100644 src/dxvk/dxvk_context_state.cpp
 create mode 100644 src/dxvk/dxvk_context_state.h
 create mode 100644 src/dxvk/dxvk_device.cpp
 create mode 100644 src/dxvk/dxvk_device.h
 create mode 100644 src/dxvk/dxvk_framebuffer.cpp
 create mode 100644 src/dxvk/dxvk_framebuffer.h
 create mode 100644 src/dxvk/dxvk_hash.h
 create mode 100644 src/dxvk/dxvk_image.cpp
 create mode 100644 src/dxvk/dxvk_image.h
 create mode 100644 src/dxvk/dxvk_include.h
 create mode 100644 src/dxvk/dxvk_instance.cpp
 create mode 100644 src/dxvk/dxvk_instance.h
 create mode 100644 src/dxvk/dxvk_lifetime.cpp
 create mode 100644 src/dxvk/dxvk_lifetime.h
 create mode 100644 src/dxvk/dxvk_main.cpp
 create mode 100644 src/dxvk/dxvk_main.h
 create mode 100644 src/dxvk/dxvk_memory.cpp
 create mode 100644 src/dxvk/dxvk_memory.h
 create mode 100644 src/dxvk/dxvk_renderpass.cpp
 create mode 100644 src/dxvk/dxvk_renderpass.h
 create mode 100644 src/dxvk/dxvk_resource.cpp
 create mode 100644 src/dxvk/dxvk_resource.h
 create mode 100644 src/dxvk/dxvk_surface.cpp
 create mode 100644 src/dxvk/dxvk_surface.h
 create mode 100644 src/dxvk/dxvk_swapchain.cpp
 create mode 100644 src/dxvk/dxvk_swapchain.h
 create mode 100644 src/dxvk/dxvk_sync.cpp
 create mode 100644 src/dxvk/dxvk_sync.h
 create mode 100644 src/dxvk/meson.build
 create mode 100644 src/dxvk/vulkan/dxvk_vulkan_extensions.cpp
 create mode 100644 src/dxvk/vulkan/dxvk_vulkan_extensions.h
 create mode 100644 src/dxvk/vulkan/dxvk_vulkan_loader.cpp
 create mode 100644 src/dxvk/vulkan/dxvk_vulkan_loader.h
 create mode 100644 src/dxvk/vulkan/dxvk_vulkan_loader_fn.h
 create mode 100644 src/meson.build
 create mode 100644 src/util/log/log.cpp
 create mode 100644 src/util/log/log.h
 create mode 100644 src/util/meson.build
 create mode 100644 src/util/rc/util_rc.h
 create mode 100644 src/util/rc/util_rc_ptr.h
 create mode 100644 src/util/util_error.h
 create mode 100644 src/util/util_math.h
 create mode 100644 src/util/util_string.h
 create mode 100644 tests/dxvk/meson.build
 create mode 100644 tests/dxvk/test_dxvk_triangle.cpp
 create mode 100644 tests/meson.build

diff --git a/build-win64.txt b/build-win64.txt
new file mode 100644
index 000000000..98ad957c7
--- /dev/null
+++ b/build-win64.txt
@@ -0,0 +1,16 @@
+[binaries]
+c = '/usr/bin/x86_64-w64-mingw32-gcc'
+cpp = '/usr/bin/x86_64-w64-mingw32-g++'
+ar = '/usr/bin/x86_64-w64-mingw32-ar'
+strip = '/usr/bin/x86_64-w64-mingw32-strip'
+exe_wrapper = 'wine'
+
+[properties]
+cpp_args = ['-std=c++17']
+cpp_link_args = ['-static', '-static-libgcc', '-static-libstdc++']
+
+[host_machine]
+system = 'windows'
+cpu_family = 'x86_64'
+cpu = 'x86_64'
+endian = 'little'
\ No newline at end of file
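+
+# Usage note (illustrative, not part of the original file): a cross build with
+# this file would typically be configured along the lines of
+#   meson --cross-file build-win64.txt build.w64
+# where "build.w64" is an assumed build directory name.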
diff --git a/include/vulkan/vk_icd.h b/include/vulkan/vk_icd.h
new file mode 100644
index 000000000..1983e5d4e
--- /dev/null
+++ b/include/vulkan/vk_icd.h
@@ -0,0 +1,152 @@
+//
+// File: vk_icd.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef VKICD_H
+#define VKICD_H
+
+#include "vulkan.h"
+#include <stdbool.h>
+
+// Loader-ICD version negotiation API.  Versions add the following features:
+//   Version 0 - Initial.  Doesn't support vk_icdGetInstanceProcAddr
+//               or vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 1 - Add support for vk_icdGetInstanceProcAddr.
+//   Version 2 - Add Loader/ICD Interface version negotiation
+//               via vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 3 - Add ICD creation/destruction of KHR_surface objects.
+//   Version 4 - Add unknown physical device extension querying via
+//               vk_icdGetPhysicalDeviceProcAddr.
+//   Version 5 - Tells ICDs that the loader is now paying attention to the
+//               application version of Vulkan passed into the ApplicationInfo
+//               structure during vkCreateInstance.  This will tell the ICD
+//               that if the loader is older, it should automatically fail a
+//               call for any API version > 1.0.  Otherwise, the loader will
+//               manually determine if it can support the expected version.
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5
+#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
+#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
+typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
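+
+// Illustrative sketch (not part of this header): an ICD exporting
+// vk_icdNegotiateLoaderICDInterfaceVersion would typically clamp the version
+// requested by the loader to the highest version it supports, here taken to be
+// CURRENT_LOADER_ICD_INTERFACE_VERSION for the sake of the example:
+//
+//   VKAPI_ATTR VkResult VKAPI_CALL
+//   vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pVersion) {
+//       if (*pVersion > CURRENT_LOADER_ICD_INTERFACE_VERSION)
+//           *pVersion = CURRENT_LOADER_ICD_INTERFACE_VERSION;
+//       return VK_SUCCESS;
+//   }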
+
+// This is defined in vk_layer.h, which will be found by the loader, but if an ICD is building against this
+// file directly, it won't be found.
+#ifndef PFN_GetPhysicalDeviceProcAddr
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+#endif
+
+/*
+ * The ICD must reserve space for a pointer for the loader's dispatch
+ * table, at the start of <each object>.
+ * The ICD must initialize this field using the set_loader_magic_value() helper defined below.
+ */
+
+#define ICD_LOADER_MAGIC 0x01CDC0DE
+
+typedef union {
+    uintptr_t loaderMagic;
+    void *loaderData;
+} VK_LOADER_DATA;
+
+static inline void set_loader_magic_value(void *pNewObject) {
+    VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    loader_info->loaderMagic = ICD_LOADER_MAGIC;
+}
+
+static inline bool valid_loader_magic_value(void *pNewObject) {
+    const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC;
+}
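+
+// Illustrative sketch (not part of this header): a dispatchable object created
+// by an ICD reserves the loader data as its very first member and initializes
+// the magic value before the handle is returned to the loader. The struct and
+// variable names below are assumptions for the example only.
+//
+//   struct icd_device {
+//       VK_LOADER_DATA loader_data;  // must be the first member
+//       /* driver-private state follows */
+//   };
+//
+//   struct icd_device *dev = malloc(sizeof(*dev));
+//   set_loader_magic_value(dev);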
+
+/*
+ * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that
+ * contains the platform-specific connection and surface information.
+ */
+typedef enum {
+    VK_ICD_WSI_PLATFORM_MIR,
+    VK_ICD_WSI_PLATFORM_WAYLAND,
+    VK_ICD_WSI_PLATFORM_WIN32,
+    VK_ICD_WSI_PLATFORM_XCB,
+    VK_ICD_WSI_PLATFORM_XLIB,
+    VK_ICD_WSI_PLATFORM_DISPLAY
+} VkIcdWsiPlatform;
+
+typedef struct {
+    VkIcdWsiPlatform platform;
+} VkIcdSurfaceBase;
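+
+// Illustrative sketch (not part of this header): given a VkSurfaceKHR backed by
+// one of the structs below, the platform tag in VkIcdSurfaceBase selects the
+// correct downcast. The plain cast assumes a build where non-dispatchable
+// handles are pointers.
+//
+//   const VkIcdSurfaceBase *base = (const VkIcdSurfaceBase *)surface;
+//   if (base->platform == VK_ICD_WSI_PLATFORM_WIN32) {
+//       const VkIcdSurfaceWin32 *win32 = (const VkIcdSurfaceWin32 *)surface;
+//       /* use win32->hinstance and win32->hwnd */
+//   }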
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    MirConnection *connection;
+    MirSurface *mirSurface;
+} VkIcdSurfaceMir;
+#endif // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct wl_display *display;
+    struct wl_surface *surface;
+} VkIcdSurfaceWayland;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    HINSTANCE hinstance;
+    HWND hwnd;
+} VkIcdSurfaceWin32;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    xcb_connection_t *connection;
+    xcb_window_t window;
+} VkIcdSurfaceXcb;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    Display *dpy;
+    Window window;
+} VkIcdSurfaceXlib;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+typedef struct {
+    ANativeWindow* window;
+} VkIcdSurfaceAndroid;
+#endif //VK_USE_PLATFORM_ANDROID_KHR
+
+typedef struct {
+    VkIcdSurfaceBase base;
+    VkDisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VkExtent2D imageExtent;
+} VkIcdSurfaceDisplay;
+
+#endif // VKICD_H
diff --git a/include/vulkan/vk_layer.h b/include/vulkan/vk_layer.h
new file mode 100644
index 000000000..a7ac29150
--- /dev/null
+++ b/include/vulkan/vk_layer.h
@@ -0,0 +1,143 @@
+//
+// File: vk_layer.h
+//
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/* Need to define dispatch table
+ * Core struct can then have ptr to dispatch table at the top
+ * Along with object ptrs for current and next OBJ
+ */
+#pragma once
+
+#include "vulkan.h"
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+
+// Definitions for VkLayerDispatchTable and VkLayerInstanceDispatchTable now appear in an externally generated header
+#include "vk_layer_dispatch_table.h"
+
+#define MAX_NUM_UNKNOWN_EXTS 250
+
+ // Loader-Layer version negotiation API.  Versions add the following features:
+ //   Versions 0/1 - Initial.  Doesn't support vk_layerGetPhysicalDeviceProcAddr
+ //                  or vk_icdNegotiateLoaderLayerInterfaceVersion.
+ //   Version 2    - Add support for vk_layerGetPhysicalDeviceProcAddr and
+ //                  vk_icdNegotiateLoaderLayerInterfaceVersion.
+#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2
+#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1
+
+// Version negotiation values
+typedef enum VkNegotiateLayerStructType {
+    LAYER_NEGOTIATE_UNINTIALIZED = 0,
+    LAYER_NEGOTIATE_INTERFACE_STRUCT = 1,
+} VkNegotiateLayerStructType;
+
+// Version negotiation structures
+typedef struct VkNegotiateLayerInterface {
+    VkNegotiateLayerStructType sType;
+    void *pNext;
+    uint32_t loaderLayerInterfaceVersion;
+    PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr;
+} VkNegotiateLayerInterface;
+
+// Version negotiation functions
+typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct);
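+
+// Illustrative sketch (not part of this header): a layer implementing the
+// negotiation entry point declared at the end of this file typically clamps
+// the interface version and fills in its own entry points. The layer_gipa,
+// layer_gdpa and layer_gpdpa names are assumptions for the example only.
+//
+//   VKAPI_ATTR VkResult VKAPI_CALL
+//   vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+//       if (pVersionStruct->sType != LAYER_NEGOTIATE_INTERFACE_STRUCT)
+//           return VK_ERROR_INITIALIZATION_FAILED;
+//       if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION)
+//           pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+//       pVersionStruct->pfnGetInstanceProcAddr       = layer_gipa;
+//       pVersionStruct->pfnGetDeviceProcAddr         = layer_gdpa;
+//       pVersionStruct->pfnGetPhysicalDeviceProcAddr = layer_gpdpa;
+//       return VK_SUCCESS;
+//   }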
+
+// Function prototype for unknown physical device extension command
+typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device);
+
+// ------------------------------------------------------------------------------------------------
+// CreateInstance and CreateDevice support structures
+
+/* Sub type of structure for instance and device loader ext of CreateInfo.
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+ * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+ * then VkLayerFunction indicates struct type pointed to by pNext
+ */
+typedef enum VkLayerFunction_ {
+    VK_LAYER_LINK_INFO = 0,
+    VK_LOADER_DATA_CALLBACK = 1
+} VkLayerFunction;
+
+typedef struct VkLayerInstanceLink_ {
+    struct VkLayerInstanceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr;
+} VkLayerInstanceLink;
+
+/*
+ * When creating the device chain the loader needs to pass
+ * down information about its device structure needed at
+ * the end of the chain. Passing the data via the
+ * VkLayerDeviceInfo avoids issues with finding the
+ * exact instance being used.
+ */
+typedef struct VkLayerDeviceInfo_ {
+    void *device_info;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+} VkLayerDeviceInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device,
+        void *object);
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerInstanceLink *pLayerInfo;
+        PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData;
+    } u;
+} VkLayerInstanceCreateInfo;
+
+typedef struct VkLayerDeviceLink_ {
+    struct VkLayerDeviceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr;
+} VkLayerDeviceLink;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerDeviceLink *pLayerInfo;
+        PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData;
+    } u;
+} VkLayerDeviceCreateInfo;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct);
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/include/vulkan/vk_platform.h b/include/vulkan/vk_platform.h
new file mode 100644
index 000000000..72f80493c
--- /dev/null
+++ b/include/vulkan/vk_platform.h
@@ -0,0 +1,120 @@
+//
+// File: vk_platform.h
+//
+/*
+** Copyright (c) 2014-2017 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef VK_PLATFORM_H_
+#define VK_PLATFORM_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif // __cplusplus
+
+/*
+***************************************************************************************************
+*   Platform-specific directives and type declarations
+***************************************************************************************************
+*/
+
+/* Platform-specific calling convention macros.
+ *
+ * Platforms should define these so that Vulkan clients call Vulkan commands
+ * with the same calling conventions that the Vulkan implementation expects.
+ *
+ * VKAPI_ATTR - Placed before the return type in function declarations.
+ *              Useful for C++11 and GCC/Clang-style function attribute syntax.
+ * VKAPI_CALL - Placed after the return type in function declarations.
+ *              Useful for MSVC-style calling convention syntax.
+ * VKAPI_PTR  - Placed between the '(' and '*' in function pointer types.
+ *
+ * Function declaration:  VKAPI_ATTR void VKAPI_CALL vkCommand(void);
+ * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void);
+ */
+#if defined(_WIN32)
+    // On Windows, Vulkan commands use the stdcall convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL __stdcall
+    #define VKAPI_PTR  VKAPI_CALL
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7
+    #error "Vulkan isn't supported for the 'armeabi' NDK ABI"
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE)
+    // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
+    // calling convention, i.e. float parameters are passed in registers. This
+    // is true even if the rest of the application passes floats on the stack,
+    // as it does by default when compiling for the armeabi-v7a NDK ABI.
+    #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+    #define VKAPI_CALL
+    #define VKAPI_PTR  VKAPI_ATTR
+#else
+    // On other platforms, use the default calling convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL
+    #define VKAPI_PTR
+#endif
+
+#include <stddef.h>
+
+#if !defined(VK_NO_STDINT_H)
+    #if defined(_MSC_VER) && (_MSC_VER < 1600)
+        typedef signed   __int8  int8_t;
+        typedef unsigned __int8  uint8_t;
+        typedef signed   __int16 int16_t;
+        typedef unsigned __int16 uint16_t;
+        typedef signed   __int32 int32_t;
+        typedef unsigned __int32 uint32_t;
+        typedef signed   __int64 int64_t;
+        typedef unsigned __int64 uint64_t;
+    #else
+        #include <stdint.h>
+    #endif
+#endif // !defined(VK_NO_STDINT_H)
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+// Platform-specific headers required by platform window system extensions.
+// These are enabled prior to #including "vulkan.h". The same enable then
+// controls inclusion of the extension interfaces in vulkan.h.
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include <android/native_window.h>
+#endif
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+#include <mir_toolkit/client_types.h>
+#endif
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#include <wayland-client.h>
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#include <windows.h>
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#include <X11/Xlib.h>
+#endif
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#include <xcb/xcb.h>
+#endif
+
+#endif
diff --git a/include/vulkan/vk_sdk_platform.h b/include/vulkan/vk_sdk_platform.h
new file mode 100644
index 000000000..ef9a000fb
--- /dev/null
+++ b/include/vulkan/vk_sdk_platform.h
@@ -0,0 +1,46 @@
+//
+// File: vk_sdk_platform.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VK_SDK_PLATFORM_H
+#define VK_SDK_PLATFORM_H
+
+#if defined(_WIN32)
+#define NOMINMAX
+#ifndef __cplusplus
+#undef inline
+#define inline __inline
+#endif // __cplusplus
+
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/)
+// C99:
+// Microsoft didn't implement C99 in Visual Studio, but started adding it with
+// VS2013.  However, VS2013 still didn't have snprintf().  The following is a
+// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the
+// "CMakeLists.txt" file).
+// NOTE: This is fixed in Visual Studio 2015.
+#define snprintf _snprintf
+#endif
+
+#define strdup _strdup
+
+#endif // _WIN32
+
+#endif // VK_SDK_PLATFORM_H
diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h
new file mode 100644
index 000000000..f20af411c
--- /dev/null
+++ b/include/vulkan/vulkan.h
@@ -0,0 +1,6429 @@
+#ifndef VULKAN_H_
+#define VULKAN_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+** Copyright (c) 2015-2017 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#define VK_VERSION_1_0 1
+#include "vk_platform.h"
+
+#define VK_MAKE_VERSION(major, minor, patch) \
+    (((major) << 22) | ((minor) << 12) | (patch))
+
+// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
+//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
+
+// Vulkan 1.0 version number
+#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
+
+#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
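+// For example, VK_MAKE_VERSION(1, 0, 3) evaluates to
+// (1 << 22) | (0 << 12) | 3 = 0x400003, and VK_VERSION_MAJOR, VK_VERSION_MINOR
+// and VK_VERSION_PATCH recover 1, 0 and 3 from that value.
+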
+// Version of this file
+#define VK_HEADER_VERSION 55
+
+
+#define VK_NULL_HANDLE 0
+        
+
+
+#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
+
+
+#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+#else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+#endif
+#endif
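+
+// For example, VK_DEFINE_HANDLE(VkInstance) below expands to
+//   typedef struct VkInstance_T* VkInstance;
+// whereas on 32-bit targets VK_DEFINE_NON_DISPATCHABLE_HANDLE maps its
+// argument to a plain uint64_t, so non-dispatchable handles are not
+// necessarily pointers.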
+        
+
+
+typedef uint32_t VkFlags;
+typedef uint32_t VkBool32;
+typedef uint64_t VkDeviceSize;
+typedef uint32_t VkSampleMask;
+
+VK_DEFINE_HANDLE(VkInstance)
+VK_DEFINE_HANDLE(VkPhysicalDevice)
+VK_DEFINE_HANDLE(VkDevice)
+VK_DEFINE_HANDLE(VkQueue)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
+VK_DEFINE_HANDLE(VkCommandBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
+
+#define VK_LOD_CLAMP_NONE                 1000.0f
+#define VK_REMAINING_MIP_LEVELS           (~0U)
+#define VK_REMAINING_ARRAY_LAYERS         (~0U)
+#define VK_WHOLE_SIZE                     (~0ULL)
+#define VK_ATTACHMENT_UNUSED              (~0U)
+#define VK_TRUE                           1
+#define VK_FALSE                          0
+#define VK_QUEUE_FAMILY_IGNORED           (~0U)
+#define VK_SUBPASS_EXTERNAL               (~0U)
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE  256
+#define VK_UUID_SIZE                      16
+#define VK_MAX_MEMORY_TYPES               32
+#define VK_MAX_MEMORY_HEAPS               16
+#define VK_MAX_EXTENSION_NAME_SIZE        256
+#define VK_MAX_DESCRIPTION_SIZE           256
+
+
+typedef enum VkPipelineCacheHeaderVersion {
+    VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+    VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
+    VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
+    VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1),
+    VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
+typedef enum VkResult {
+    VK_SUCCESS = 0,
+    VK_NOT_READY = 1,
+    VK_TIMEOUT = 2,
+    VK_EVENT_SET = 3,
+    VK_EVENT_RESET = 4,
+    VK_INCOMPLETE = 5,
+    VK_ERROR_OUT_OF_HOST_MEMORY = -1,
+    VK_ERROR_OUT_OF_DEVICE_MEMORY = -2,
+    VK_ERROR_INITIALIZATION_FAILED = -3,
+    VK_ERROR_DEVICE_LOST = -4,
+    VK_ERROR_MEMORY_MAP_FAILED = -5,
+    VK_ERROR_LAYER_NOT_PRESENT = -6,
+    VK_ERROR_EXTENSION_NOT_PRESENT = -7,
+    VK_ERROR_FEATURE_NOT_PRESENT = -8,
+    VK_ERROR_INCOMPATIBLE_DRIVER = -9,
+    VK_ERROR_TOO_MANY_OBJECTS = -10,
+    VK_ERROR_FORMAT_NOT_SUPPORTED = -11,
+    VK_ERROR_FRAGMENTED_POOL = -12,
+    VK_ERROR_SURFACE_LOST_KHR = -1000000000,
+    VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001,
+    VK_SUBOPTIMAL_KHR = 1000001003,
+    VK_ERROR_OUT_OF_DATE_KHR = -1000001004,
+    VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
+    VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
+    VK_ERROR_INVALID_SHADER_NV = -1000012000,
+    VK_ERROR_OUT_OF_POOL_MEMORY_KHR = -1000069000,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = -1000072003,
+    VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL,
+    VK_RESULT_END_RANGE = VK_INCOMPLETE,
+    VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1),
+    VK_RESULT_MAX_ENUM = 0x7FFFFFFF
+} VkResult;
+
+typedef enum VkStructureType {
+    VK_STRUCTURE_TYPE_APPLICATION_INFO = 0,
+    VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2,
+    VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO = 4,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5,
+    VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6,
+    VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7,
+    VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8,
+    VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9,
+    VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11,
+    VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12,
+    VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13,
+    VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16,
+    VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19,
+    VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23,
+    VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24,
+    VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26,
+    VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28,
+    VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29,
+    VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30,
+    VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35,
+    VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38,
+    VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42,
+    VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46,
+    VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47,
+    VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000,
+    VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000,
+    VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001,
+    VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000,
+    VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000,
+    VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000,
+    VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000,
+    VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000,
+    VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000,
+    VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002,
+    VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHX = 1000053000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHX = 1000053001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHX = 1000053002,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = 1000059000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = 1000059001,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = 1000059002,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = 1000059004,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000059005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = 1000059006,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = 1000059008,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHX = 1000060000,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX = 1000060001,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHX = 1000060002,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHX = 1000060003,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHX = 1000060004,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHX = 1000060005,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHX = 1000060006,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHX = 1000060007,
+    VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHX = 1000060008,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHX = 1000060009,
+    VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHX = 1000060010,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHX = 1000060011,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHX = 1000060012,
+    VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000,
+    VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHX = 1000070000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHX = 1000070001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = 1000071000,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = 1000071001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = 1000071002,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = 1000071003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = 1000071004,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = 1000072000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = 1000072001,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = 1000072002,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001,
+    VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002,
+    VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000,
+    VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001,
+    VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = 1000076000,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = 1000076001,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = 1000077000,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001,
+    VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = 1000083000,
+    VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = 1000085000,
+    VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001,
+    VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002,
+    VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003,
+    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004,
+    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = 1000090000,
+    VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000,
+    VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001,
+    VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003,
+    VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000,
+    VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001,
+    VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000,
+    VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = 1000112000,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = 1000112001,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = 1000113000,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001,
+    VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000,
+    VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001,
+    VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = 1000120000,
+    VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000,
+    VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = 1000127000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = 1000127001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000,
+    VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146000,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146001,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146002,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = 1000146003,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = 1000146004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000,
+    VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1),
+    VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkStructureType;
+
+typedef enum VkSystemAllocationScope {
+    VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
+    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
+    VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
+    VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
+    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
+    VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE,
+    VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1),
+    VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
+} VkSystemAllocationScope;
+
+typedef enum VkInternalAllocationType {
+    VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
+    VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
+    VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
+    VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1),
+    VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkInternalAllocationType;
+
+typedef enum VkFormat {
+    VK_FORMAT_UNDEFINED = 0,
+    VK_FORMAT_R4G4_UNORM_PACK8 = 1,
+    VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2,
+    VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3,
+    VK_FORMAT_R5G6B5_UNORM_PACK16 = 4,
+    VK_FORMAT_B5G6R5_UNORM_PACK16 = 5,
+    VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6,
+    VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7,
+    VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8,
+    VK_FORMAT_R8_UNORM = 9,
+    VK_FORMAT_R8_SNORM = 10,
+    VK_FORMAT_R8_USCALED = 11,
+    VK_FORMAT_R8_SSCALED = 12,
+    VK_FORMAT_R8_UINT = 13,
+    VK_FORMAT_R8_SINT = 14,
+    VK_FORMAT_R8_SRGB = 15,
+    VK_FORMAT_R8G8_UNORM = 16,
+    VK_FORMAT_R8G8_SNORM = 17,
+    VK_FORMAT_R8G8_USCALED = 18,
+    VK_FORMAT_R8G8_SSCALED = 19,
+    VK_FORMAT_R8G8_UINT = 20,
+    VK_FORMAT_R8G8_SINT = 21,
+    VK_FORMAT_R8G8_SRGB = 22,
+    VK_FORMAT_R8G8B8_UNORM = 23,
+    VK_FORMAT_R8G8B8_SNORM = 24,
+    VK_FORMAT_R8G8B8_USCALED = 25,
+    VK_FORMAT_R8G8B8_SSCALED = 26,
+    VK_FORMAT_R8G8B8_UINT = 27,
+    VK_FORMAT_R8G8B8_SINT = 28,
+    VK_FORMAT_R8G8B8_SRGB = 29,
+    VK_FORMAT_B8G8R8_UNORM = 30,
+    VK_FORMAT_B8G8R8_SNORM = 31,
+    VK_FORMAT_B8G8R8_USCALED = 32,
+    VK_FORMAT_B8G8R8_SSCALED = 33,
+    VK_FORMAT_B8G8R8_UINT = 34,
+    VK_FORMAT_B8G8R8_SINT = 35,
+    VK_FORMAT_B8G8R8_SRGB = 36,
+    VK_FORMAT_R8G8B8A8_UNORM = 37,
+    VK_FORMAT_R8G8B8A8_SNORM = 38,
+    VK_FORMAT_R8G8B8A8_USCALED = 39,
+    VK_FORMAT_R8G8B8A8_SSCALED = 40,
+    VK_FORMAT_R8G8B8A8_UINT = 41,
+    VK_FORMAT_R8G8B8A8_SINT = 42,
+    VK_FORMAT_R8G8B8A8_SRGB = 43,
+    VK_FORMAT_B8G8R8A8_UNORM = 44,
+    VK_FORMAT_B8G8R8A8_SNORM = 45,
+    VK_FORMAT_B8G8R8A8_USCALED = 46,
+    VK_FORMAT_B8G8R8A8_SSCALED = 47,
+    VK_FORMAT_B8G8R8A8_UINT = 48,
+    VK_FORMAT_B8G8R8A8_SINT = 49,
+    VK_FORMAT_B8G8R8A8_SRGB = 50,
+    VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51,
+    VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52,
+    VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53,
+    VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54,
+    VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55,
+    VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56,
+    VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57,
+    VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58,
+    VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59,
+    VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60,
+    VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61,
+    VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62,
+    VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63,
+    VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64,
+    VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65,
+    VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66,
+    VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67,
+    VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68,
+    VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69,
+    VK_FORMAT_R16_UNORM = 70,
+    VK_FORMAT_R16_SNORM = 71,
+    VK_FORMAT_R16_USCALED = 72,
+    VK_FORMAT_R16_SSCALED = 73,
+    VK_FORMAT_R16_UINT = 74,
+    VK_FORMAT_R16_SINT = 75,
+    VK_FORMAT_R16_SFLOAT = 76,
+    VK_FORMAT_R16G16_UNORM = 77,
+    VK_FORMAT_R16G16_SNORM = 78,
+    VK_FORMAT_R16G16_USCALED = 79,
+    VK_FORMAT_R16G16_SSCALED = 80,
+    VK_FORMAT_R16G16_UINT = 81,
+    VK_FORMAT_R16G16_SINT = 82,
+    VK_FORMAT_R16G16_SFLOAT = 83,
+    VK_FORMAT_R16G16B16_UNORM = 84,
+    VK_FORMAT_R16G16B16_SNORM = 85,
+    VK_FORMAT_R16G16B16_USCALED = 86,
+    VK_FORMAT_R16G16B16_SSCALED = 87,
+    VK_FORMAT_R16G16B16_UINT = 88,
+    VK_FORMAT_R16G16B16_SINT = 89,
+    VK_FORMAT_R16G16B16_SFLOAT = 90,
+    VK_FORMAT_R16G16B16A16_UNORM = 91,
+    VK_FORMAT_R16G16B16A16_SNORM = 92,
+    VK_FORMAT_R16G16B16A16_USCALED = 93,
+    VK_FORMAT_R16G16B16A16_SSCALED = 94,
+    VK_FORMAT_R16G16B16A16_UINT = 95,
+    VK_FORMAT_R16G16B16A16_SINT = 96,
+    VK_FORMAT_R16G16B16A16_SFLOAT = 97,
+    VK_FORMAT_R32_UINT = 98,
+    VK_FORMAT_R32_SINT = 99,
+    VK_FORMAT_R32_SFLOAT = 100,
+    VK_FORMAT_R32G32_UINT = 101,
+    VK_FORMAT_R32G32_SINT = 102,
+    VK_FORMAT_R32G32_SFLOAT = 103,
+    VK_FORMAT_R32G32B32_UINT = 104,
+    VK_FORMAT_R32G32B32_SINT = 105,
+    VK_FORMAT_R32G32B32_SFLOAT = 106,
+    VK_FORMAT_R32G32B32A32_UINT = 107,
+    VK_FORMAT_R32G32B32A32_SINT = 108,
+    VK_FORMAT_R32G32B32A32_SFLOAT = 109,
+    VK_FORMAT_R64_UINT = 110,
+    VK_FORMAT_R64_SINT = 111,
+    VK_FORMAT_R64_SFLOAT = 112,
+    VK_FORMAT_R64G64_UINT = 113,
+    VK_FORMAT_R64G64_SINT = 114,
+    VK_FORMAT_R64G64_SFLOAT = 115,
+    VK_FORMAT_R64G64B64_UINT = 116,
+    VK_FORMAT_R64G64B64_SINT = 117,
+    VK_FORMAT_R64G64B64_SFLOAT = 118,
+    VK_FORMAT_R64G64B64A64_UINT = 119,
+    VK_FORMAT_R64G64B64A64_SINT = 120,
+    VK_FORMAT_R64G64B64A64_SFLOAT = 121,
+    VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122,
+    VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123,
+    VK_FORMAT_D16_UNORM = 124,
+    VK_FORMAT_X8_D24_UNORM_PACK32 = 125,
+    VK_FORMAT_D32_SFLOAT = 126,
+    VK_FORMAT_S8_UINT = 127,
+    VK_FORMAT_D16_UNORM_S8_UINT = 128,
+    VK_FORMAT_D24_UNORM_S8_UINT = 129,
+    VK_FORMAT_D32_SFLOAT_S8_UINT = 130,
+    VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131,
+    VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132,
+    VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133,
+    VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134,
+    VK_FORMAT_BC2_UNORM_BLOCK = 135,
+    VK_FORMAT_BC2_SRGB_BLOCK = 136,
+    VK_FORMAT_BC3_UNORM_BLOCK = 137,
+    VK_FORMAT_BC3_SRGB_BLOCK = 138,
+    VK_FORMAT_BC4_UNORM_BLOCK = 139,
+    VK_FORMAT_BC4_SNORM_BLOCK = 140,
+    VK_FORMAT_BC5_UNORM_BLOCK = 141,
+    VK_FORMAT_BC5_SNORM_BLOCK = 142,
+    VK_FORMAT_BC6H_UFLOAT_BLOCK = 143,
+    VK_FORMAT_BC6H_SFLOAT_BLOCK = 144,
+    VK_FORMAT_BC7_UNORM_BLOCK = 145,
+    VK_FORMAT_BC7_SRGB_BLOCK = 146,
+    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147,
+    VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148,
+    VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149,
+    VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150,
+    VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151,
+    VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152,
+    VK_FORMAT_EAC_R11_UNORM_BLOCK = 153,
+    VK_FORMAT_EAC_R11_SNORM_BLOCK = 154,
+    VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155,
+    VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156,
+    VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157,
+    VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158,
+    VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159,
+    VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160,
+    VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161,
+    VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162,
+    VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163,
+    VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164,
+    VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165,
+    VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166,
+    VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167,
+    VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168,
+    VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169,
+    VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170,
+    VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171,
+    VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172,
+    VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173,
+    VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174,
+    VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175,
+    VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176,
+    VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177,
+    VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178,
+    VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179,
+    VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180,
+    VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181,
+    VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182,
+    VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183,
+    VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184,
+    VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000,
+    VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001,
+    VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002,
+    VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003,
+    VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004,
+    VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
+    VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
+    VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+    VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED,
+    VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1),
+    VK_FORMAT_MAX_ENUM = 0x7FFFFFFF
+} VkFormat;
+
+typedef enum VkImageType {
+    VK_IMAGE_TYPE_1D = 0,
+    VK_IMAGE_TYPE_2D = 1,
+    VK_IMAGE_TYPE_3D = 2,
+    VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D,
+    VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D,
+    VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1),
+    VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
+typedef enum VkImageTiling {
+    VK_IMAGE_TILING_OPTIMAL = 0,
+    VK_IMAGE_TILING_LINEAR = 1,
+    VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL,
+    VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR,
+    VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1),
+    VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
+} VkImageTiling;
+
+typedef enum VkPhysicalDeviceType {
+    VK_PHYSICAL_DEVICE_TYPE_OTHER = 0,
+    VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1,
+    VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2,
+    VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3,
+    VK_PHYSICAL_DEVICE_TYPE_CPU = 4,
+    VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU,
+    VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1),
+    VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkPhysicalDeviceType;
+
+typedef enum VkQueryType {
+    VK_QUERY_TYPE_OCCLUSION = 0,
+    VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
+    VK_QUERY_TYPE_TIMESTAMP = 2,
+    VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION,
+    VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP,
+    VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1),
+    VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkQueryType;
+
+typedef enum VkSharingMode {
+    VK_SHARING_MODE_EXCLUSIVE = 0,
+    VK_SHARING_MODE_CONCURRENT = 1,
+    VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE,
+    VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT,
+    VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1),
+    VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSharingMode;
+
+typedef enum VkImageLayout {
+    VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    VK_IMAGE_LAYOUT_GENERAL = 1,
+    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
+    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
+    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
+    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
+    VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
+    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
+    VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
+    VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED,
+    VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1),
+    VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
+} VkImageLayout;
+
+typedef enum VkImageViewType {
+    VK_IMAGE_VIEW_TYPE_1D = 0,
+    VK_IMAGE_VIEW_TYPE_2D = 1,
+    VK_IMAGE_VIEW_TYPE_3D = 2,
+    VK_IMAGE_VIEW_TYPE_CUBE = 3,
+    VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
+    VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
+    VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
+    VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D,
+    VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
+    VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1),
+    VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewType;
+
+typedef enum VkComponentSwizzle {
+    VK_COMPONENT_SWIZZLE_IDENTITY = 0,
+    VK_COMPONENT_SWIZZLE_ZERO = 1,
+    VK_COMPONENT_SWIZZLE_ONE = 2,
+    VK_COMPONENT_SWIZZLE_R = 3,
+    VK_COMPONENT_SWIZZLE_G = 4,
+    VK_COMPONENT_SWIZZLE_B = 5,
+    VK_COMPONENT_SWIZZLE_A = 6,
+    VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY,
+    VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A,
+    VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1),
+    VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF
+} VkComponentSwizzle;
+
+typedef enum VkVertexInputRate {
+    VK_VERTEX_INPUT_RATE_VERTEX = 0,
+    VK_VERTEX_INPUT_RATE_INSTANCE = 1,
+    VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX,
+    VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE,
+    VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1),
+    VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
+} VkVertexInputRate;
+
+typedef enum VkPrimitiveTopology {
+    VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
+    VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
+    VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST,
+    VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1),
+    VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
+} VkPrimitiveTopology;
+
+typedef enum VkPolygonMode {
+    VK_POLYGON_MODE_FILL = 0,
+    VK_POLYGON_MODE_LINE = 1,
+    VK_POLYGON_MODE_POINT = 2,
+    VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
+    VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL,
+    VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT,
+    VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1),
+    VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkPolygonMode;
+
+typedef enum VkFrontFace {
+    VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
+    VK_FRONT_FACE_CLOCKWISE = 1,
+    VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE,
+    VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1),
+    VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
+} VkFrontFace;
+
+typedef enum VkCompareOp {
+    VK_COMPARE_OP_NEVER = 0,
+    VK_COMPARE_OP_LESS = 1,
+    VK_COMPARE_OP_EQUAL = 2,
+    VK_COMPARE_OP_LESS_OR_EQUAL = 3,
+    VK_COMPARE_OP_GREATER = 4,
+    VK_COMPARE_OP_NOT_EQUAL = 5,
+    VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
+    VK_COMPARE_OP_ALWAYS = 7,
+    VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER,
+    VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS,
+    VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1),
+    VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkCompareOp;
+
+typedef enum VkStencilOp {
+    VK_STENCIL_OP_KEEP = 0,
+    VK_STENCIL_OP_ZERO = 1,
+    VK_STENCIL_OP_REPLACE = 2,
+    VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
+    VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
+    VK_STENCIL_OP_INVERT = 5,
+    VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
+    VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
+    VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP,
+    VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP,
+    VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1),
+    VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
+} VkStencilOp;
+
+typedef enum VkLogicOp {
+    VK_LOGIC_OP_CLEAR = 0,
+    VK_LOGIC_OP_AND = 1,
+    VK_LOGIC_OP_AND_REVERSE = 2,
+    VK_LOGIC_OP_COPY = 3,
+    VK_LOGIC_OP_AND_INVERTED = 4,
+    VK_LOGIC_OP_NO_OP = 5,
+    VK_LOGIC_OP_XOR = 6,
+    VK_LOGIC_OP_OR = 7,
+    VK_LOGIC_OP_NOR = 8,
+    VK_LOGIC_OP_EQUIVALENT = 9,
+    VK_LOGIC_OP_INVERT = 10,
+    VK_LOGIC_OP_OR_REVERSE = 11,
+    VK_LOGIC_OP_COPY_INVERTED = 12,
+    VK_LOGIC_OP_OR_INVERTED = 13,
+    VK_LOGIC_OP_NAND = 14,
+    VK_LOGIC_OP_SET = 15,
+    VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR,
+    VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET,
+    VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1),
+    VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
+} VkLogicOp;
+
+typedef enum VkBlendFactor {
+    VK_BLEND_FACTOR_ZERO = 0,
+    VK_BLEND_FACTOR_ONE = 1,
+    VK_BLEND_FACTOR_SRC_COLOR = 2,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3,
+    VK_BLEND_FACTOR_DST_COLOR = 4,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5,
+    VK_BLEND_FACTOR_SRC_ALPHA = 6,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7,
+    VK_BLEND_FACTOR_DST_ALPHA = 8,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9,
+    VK_BLEND_FACTOR_CONSTANT_COLOR = 10,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11,
+    VK_BLEND_FACTOR_CONSTANT_ALPHA = 12,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13,
+    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14,
+    VK_BLEND_FACTOR_SRC1_COLOR = 15,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16,
+    VK_BLEND_FACTOR_SRC1_ALPHA = 17,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18,
+    VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO,
+    VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA,
+    VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1),
+    VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF
+} VkBlendFactor;
+
+typedef enum VkBlendOp {
+    VK_BLEND_OP_ADD = 0,
+    VK_BLEND_OP_SUBTRACT = 1,
+    VK_BLEND_OP_REVERSE_SUBTRACT = 2,
+    VK_BLEND_OP_MIN = 3,
+    VK_BLEND_OP_MAX = 4,
+    VK_BLEND_OP_ZERO_EXT = 1000148000,
+    VK_BLEND_OP_SRC_EXT = 1000148001,
+    VK_BLEND_OP_DST_EXT = 1000148002,
+    VK_BLEND_OP_SRC_OVER_EXT = 1000148003,
+    VK_BLEND_OP_DST_OVER_EXT = 1000148004,
+    VK_BLEND_OP_SRC_IN_EXT = 1000148005,
+    VK_BLEND_OP_DST_IN_EXT = 1000148006,
+    VK_BLEND_OP_SRC_OUT_EXT = 1000148007,
+    VK_BLEND_OP_DST_OUT_EXT = 1000148008,
+    VK_BLEND_OP_SRC_ATOP_EXT = 1000148009,
+    VK_BLEND_OP_DST_ATOP_EXT = 1000148010,
+    VK_BLEND_OP_XOR_EXT = 1000148011,
+    VK_BLEND_OP_MULTIPLY_EXT = 1000148012,
+    VK_BLEND_OP_SCREEN_EXT = 1000148013,
+    VK_BLEND_OP_OVERLAY_EXT = 1000148014,
+    VK_BLEND_OP_DARKEN_EXT = 1000148015,
+    VK_BLEND_OP_LIGHTEN_EXT = 1000148016,
+    VK_BLEND_OP_COLORDODGE_EXT = 1000148017,
+    VK_BLEND_OP_COLORBURN_EXT = 1000148018,
+    VK_BLEND_OP_HARDLIGHT_EXT = 1000148019,
+    VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020,
+    VK_BLEND_OP_DIFFERENCE_EXT = 1000148021,
+    VK_BLEND_OP_EXCLUSION_EXT = 1000148022,
+    VK_BLEND_OP_INVERT_EXT = 1000148023,
+    VK_BLEND_OP_INVERT_RGB_EXT = 1000148024,
+    VK_BLEND_OP_LINEARDODGE_EXT = 1000148025,
+    VK_BLEND_OP_LINEARBURN_EXT = 1000148026,
+    VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027,
+    VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028,
+    VK_BLEND_OP_PINLIGHT_EXT = 1000148029,
+    VK_BLEND_OP_HARDMIX_EXT = 1000148030,
+    VK_BLEND_OP_HSL_HUE_EXT = 1000148031,
+    VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032,
+    VK_BLEND_OP_HSL_COLOR_EXT = 1000148033,
+    VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034,
+    VK_BLEND_OP_PLUS_EXT = 1000148035,
+    VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036,
+    VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037,
+    VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038,
+    VK_BLEND_OP_MINUS_EXT = 1000148039,
+    VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040,
+    VK_BLEND_OP_CONTRAST_EXT = 1000148041,
+    VK_BLEND_OP_INVERT_OVG_EXT = 1000148042,
+    VK_BLEND_OP_RED_EXT = 1000148043,
+    VK_BLEND_OP_GREEN_EXT = 1000148044,
+    VK_BLEND_OP_BLUE_EXT = 1000148045,
+    VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD,
+    VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX,
+    VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1),
+    VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF
+} VkBlendOp;
+
+typedef enum VkDynamicState {
+    VK_DYNAMIC_STATE_VIEWPORT = 0,
+    VK_DYNAMIC_STATE_SCISSOR = 1,
+    VK_DYNAMIC_STATE_LINE_WIDTH = 2,
+    VK_DYNAMIC_STATE_DEPTH_BIAS = 3,
+    VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5,
+    VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6,
+    VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7,
+    VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000,
+    VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT,
+    VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1),
+    VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF
+} VkDynamicState;
+
+typedef enum VkFilter {
+    VK_FILTER_NEAREST = 0,
+    VK_FILTER_LINEAR = 1,
+    VK_FILTER_CUBIC_IMG = 1000015000,
+    VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST,
+    VK_FILTER_END_RANGE = VK_FILTER_LINEAR,
+    VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1),
+    VK_FILTER_MAX_ENUM = 0x7FFFFFFF
+} VkFilter;
+
+typedef enum VkSamplerMipmapMode {
+    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+    VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR,
+    VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1),
+    VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
+
+typedef enum VkSamplerAddressMode {
+    VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
+    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
+    VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1),
+    VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerAddressMode;
+
+typedef enum VkBorderColor {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1),
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef enum VkDescriptorType {
+    VK_DESCRIPTOR_TYPE_SAMPLER = 0,
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1,
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2,
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3,
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4,
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
+    VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
+    VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER,
+    VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1),
+    VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorType;
+
+typedef enum VkAttachmentLoadOp {
+    VK_ATTACHMENT_LOAD_OP_LOAD = 0,
+    VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
+    VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
+    VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD,
+    VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+    VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1),
+    VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentLoadOp;
+
+typedef enum VkAttachmentStoreOp {
+    VK_ATTACHMENT_STORE_OP_STORE = 0,
+    VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
+    VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE,
+    VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+    VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1),
+    VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentStoreOp;
+
+typedef enum VkPipelineBindPoint {
+    VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
+    VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+    VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE,
+    VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1),
+    VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineBindPoint;
+
+typedef enum VkCommandBufferLevel {
+    VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0,
+    VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1,
+    VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY,
+    VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1),
+    VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferLevel;
+
+typedef enum VkIndexType {
+    VK_INDEX_TYPE_UINT16 = 0,
+    VK_INDEX_TYPE_UINT32 = 1,
+    VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16,
+    VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32,
+    VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1),
+    VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkIndexType;
+
+typedef enum VkSubpassContents {
+    VK_SUBPASS_CONTENTS_INLINE = 0,
+    VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1,
+    VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE,
+    VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
+    VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1),
+    VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassContents;
+
+typedef enum VkObjectType {
+    VK_OBJECT_TYPE_UNKNOWN = 0,
+    VK_OBJECT_TYPE_INSTANCE = 1,
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
+    VK_OBJECT_TYPE_DEVICE = 3,
+    VK_OBJECT_TYPE_QUEUE = 4,
+    VK_OBJECT_TYPE_SEMAPHORE = 5,
+    VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
+    VK_OBJECT_TYPE_FENCE = 7,
+    VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
+    VK_OBJECT_TYPE_BUFFER = 9,
+    VK_OBJECT_TYPE_IMAGE = 10,
+    VK_OBJECT_TYPE_EVENT = 11,
+    VK_OBJECT_TYPE_QUERY_POOL = 12,
+    VK_OBJECT_TYPE_BUFFER_VIEW = 13,
+    VK_OBJECT_TYPE_IMAGE_VIEW = 14,
+    VK_OBJECT_TYPE_SHADER_MODULE = 15,
+    VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
+    VK_OBJECT_TYPE_RENDER_PASS = 18,
+    VK_OBJECT_TYPE_PIPELINE = 19,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
+    VK_OBJECT_TYPE_SAMPLER = 21,
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
+    VK_OBJECT_TYPE_FRAMEBUFFER = 24,
+    VK_OBJECT_TYPE_COMMAND_POOL = 25,
+    VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
+    VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = 1000085000,
+    VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000,
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001,
+    VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN,
+    VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL,
+    VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1),
+    VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkObjectType;
+
+typedef VkFlags VkInstanceCreateFlags;
+
+typedef enum VkFormatFeatureFlagBits {
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
+    VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
+    VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
+    VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
+    VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400,
+    VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = 0x00004000,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = 0x00008000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000,
+    VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFormatFeatureFlagBits;
+typedef VkFlags VkFormatFeatureFlags;
+
+typedef enum VkImageUsageFlagBits {
+    VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
+    VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
+    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
+    VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020,
+    VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
+    VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
+    VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageUsageFlagBits;
+typedef VkFlags VkImageUsageFlags;
+
+typedef enum VkImageCreateFlagBits {
+    VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008,
+    VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010,
+    VK_IMAGE_CREATE_BIND_SFR_BIT_KHX = 0x00000040,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = 0x00000020,
+    VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageCreateFlagBits;
+typedef VkFlags VkImageCreateFlags;
+
+typedef enum VkSampleCountFlagBits {
+    VK_SAMPLE_COUNT_1_BIT = 0x00000001,
+    VK_SAMPLE_COUNT_2_BIT = 0x00000002,
+    VK_SAMPLE_COUNT_4_BIT = 0x00000004,
+    VK_SAMPLE_COUNT_8_BIT = 0x00000008,
+    VK_SAMPLE_COUNT_16_BIT = 0x00000010,
+    VK_SAMPLE_COUNT_32_BIT = 0x00000020,
+    VK_SAMPLE_COUNT_64_BIT = 0x00000040,
+    VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSampleCountFlagBits;
+typedef VkFlags VkSampleCountFlags;
+
+typedef enum VkQueueFlagBits {
+    VK_QUEUE_GRAPHICS_BIT = 0x00000001,
+    VK_QUEUE_COMPUTE_BIT = 0x00000002,
+    VK_QUEUE_TRANSFER_BIT = 0x00000004,
+    VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008,
+    VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueueFlagBits;
+typedef VkFlags VkQueueFlags;
+
+typedef enum VkMemoryPropertyFlagBits {
+    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002,
+    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004,
+    VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008,
+    VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010,
+    VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryPropertyFlagBits;
+typedef VkFlags VkMemoryPropertyFlags;
+
+typedef enum VkMemoryHeapFlagBits {
+    VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHX = 0x00000002,
+    VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryHeapFlagBits;
+typedef VkFlags VkMemoryHeapFlags;
+typedef VkFlags VkDeviceCreateFlags;
+typedef VkFlags VkDeviceQueueCreateFlags;
+
+typedef enum VkPipelineStageFlagBits {
+    VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001,
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002,
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004,
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008,
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010,
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020,
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080,
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100,
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200,
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400,
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800,
+    VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000,
+    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000,
+    VK_PIPELINE_STAGE_HOST_BIT = 0x00004000,
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000,
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000,
+    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000,
+    VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineStageFlagBits;
+typedef VkFlags VkPipelineStageFlags;
+typedef VkFlags VkMemoryMapFlags;
+
+typedef enum VkImageAspectFlagBits {
+    VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
+    VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
+    VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
+    VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
+    VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageAspectFlagBits;
+typedef VkFlags VkImageAspectFlags;
+
+typedef enum VkSparseImageFormatFlagBits {
+    VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001,
+    VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002,
+    VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004,
+    VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseImageFormatFlagBits;
+typedef VkFlags VkSparseImageFormatFlags;
+
+typedef enum VkSparseMemoryBindFlagBits {
+    VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001,
+    VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseMemoryBindFlagBits;
+typedef VkFlags VkSparseMemoryBindFlags;
+
+typedef enum VkFenceCreateFlagBits {
+    VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001,
+    VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceCreateFlagBits;
+typedef VkFlags VkFenceCreateFlags;
+typedef VkFlags VkSemaphoreCreateFlags;
+typedef VkFlags VkEventCreateFlags;
+typedef VkFlags VkQueryPoolCreateFlags;
+
+typedef enum VkQueryPipelineStatisticFlagBits {
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001,
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002,
+    VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040,
+    VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200,
+    VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400,
+    VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryPipelineStatisticFlagBits;
+typedef VkFlags VkQueryPipelineStatisticFlags;
+
+typedef enum VkQueryResultFlagBits {
+    VK_QUERY_RESULT_64_BIT = 0x00000001,
+    VK_QUERY_RESULT_WAIT_BIT = 0x00000002,
+    VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004,
+    VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008,
+    VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryResultFlagBits;
+typedef VkFlags VkQueryResultFlags;
+
+typedef enum VkBufferCreateFlagBits {
+    VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferCreateFlagBits;
+typedef VkFlags VkBufferCreateFlags;
+
+typedef enum VkBufferUsageFlagBits {
+    VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004,
+    VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010,
+    VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020,
+    VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040,
+    VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080,
+    VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100,
+    VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferUsageFlagBits;
+typedef VkFlags VkBufferUsageFlags;
+typedef VkFlags VkBufferViewCreateFlags;
+typedef VkFlags VkImageViewCreateFlags;
+typedef VkFlags VkShaderModuleCreateFlags;
+typedef VkFlags VkPipelineCacheCreateFlags;
+
+typedef enum VkPipelineCreateFlagBits {
+    VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001,
+    VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
+    VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHX = 0x00000008,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_KHX = 0x00000010,
+    VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCreateFlagBits;
+typedef VkFlags VkPipelineCreateFlags;
+typedef VkFlags VkPipelineShaderStageCreateFlags;
+
+typedef enum VkShaderStageFlagBits {
+    VK_SHADER_STAGE_VERTEX_BIT = 0x00000001,
+    VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002,
+    VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004,
+    VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008,
+    VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010,
+    VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020,
+    VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F,
+    VK_SHADER_STAGE_ALL = 0x7FFFFFFF,
+    VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkShaderStageFlagBits;
+typedef VkFlags VkPipelineVertexInputStateCreateFlags;
+typedef VkFlags VkPipelineInputAssemblyStateCreateFlags;
+typedef VkFlags VkPipelineTessellationStateCreateFlags;
+typedef VkFlags VkPipelineViewportStateCreateFlags;
+typedef VkFlags VkPipelineRasterizationStateCreateFlags;
+
+typedef enum VkCullModeFlagBits {
+    VK_CULL_MODE_NONE = 0,
+    VK_CULL_MODE_FRONT_BIT = 0x00000001,
+    VK_CULL_MODE_BACK_BIT = 0x00000002,
+    VK_CULL_MODE_FRONT_AND_BACK = 0x00000003,
+    VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCullModeFlagBits;
+typedef VkFlags VkCullModeFlags;
+typedef VkFlags VkPipelineMultisampleStateCreateFlags;
+typedef VkFlags VkPipelineDepthStencilStateCreateFlags;
+typedef VkFlags VkPipelineColorBlendStateCreateFlags;
+
+typedef enum VkColorComponentFlagBits {
+    VK_COLOR_COMPONENT_R_BIT = 0x00000001,
+    VK_COLOR_COMPONENT_G_BIT = 0x00000002,
+    VK_COLOR_COMPONENT_B_BIT = 0x00000004,
+    VK_COLOR_COMPONENT_A_BIT = 0x00000008,
+    VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkColorComponentFlagBits;
+typedef VkFlags VkColorComponentFlags;
+typedef VkFlags VkPipelineDynamicStateCreateFlags;
+typedef VkFlags VkPipelineLayoutCreateFlags;
+typedef VkFlags VkShaderStageFlags;
+typedef VkFlags VkSamplerCreateFlags;
+
+typedef enum VkDescriptorSetLayoutCreateFlagBits {
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorSetLayoutCreateFlagBits;
+typedef VkFlags VkDescriptorSetLayoutCreateFlags;
+
+typedef enum VkDescriptorPoolCreateFlagBits {
+    VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
+    VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorPoolCreateFlagBits;
+typedef VkFlags VkDescriptorPoolCreateFlags;
+typedef VkFlags VkDescriptorPoolResetFlags;
+typedef VkFlags VkFramebufferCreateFlags;
+typedef VkFlags VkRenderPassCreateFlags;
+
+typedef enum VkAttachmentDescriptionFlagBits {
+    VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001,
+    VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentDescriptionFlagBits;
+typedef VkFlags VkAttachmentDescriptionFlags;
+
+typedef enum VkSubpassDescriptionFlagBits {
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001,
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002,
+    VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassDescriptionFlagBits;
+typedef VkFlags VkSubpassDescriptionFlags;
+
+typedef enum VkAccessFlagBits {
+    VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
+    VK_ACCESS_INDEX_READ_BIT = 0x00000002,
+    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
+    VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
+    VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
+    VK_ACCESS_SHADER_READ_BIT = 0x00000020,
+    VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
+    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
+    VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
+    VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
+    VK_ACCESS_HOST_READ_BIT = 0x00002000,
+    VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
+    VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
+    VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+    VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000,
+    VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
+    VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAccessFlagBits;
+typedef VkFlags VkAccessFlags;
+
+typedef enum VkDependencyFlagBits {
+    VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT_KHX = 0x00000002,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT_KHX = 0x00000004,
+    VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDependencyFlagBits;
+typedef VkFlags VkDependencyFlags;
+
+typedef enum VkCommandPoolCreateFlagBits {
+    VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001,
+    VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002,
+    VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolCreateFlagBits;
+typedef VkFlags VkCommandPoolCreateFlags;
+
+typedef enum VkCommandPoolResetFlagBits {
+    VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolResetFlagBits;
+typedef VkFlags VkCommandPoolResetFlags;
+
+typedef enum VkCommandBufferUsageFlagBits {
+    VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002,
+    VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004,
+    VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferUsageFlagBits;
+typedef VkFlags VkCommandBufferUsageFlags;
+
+typedef enum VkQueryControlFlagBits {
+    VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001,
+    VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryControlFlagBits;
+typedef VkFlags VkQueryControlFlags;
+
+typedef enum VkCommandBufferResetFlagBits {
+    VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferResetFlagBits;
+typedef VkFlags VkCommandBufferResetFlags;
+
+typedef enum VkStencilFaceFlagBits {
+    VK_STENCIL_FACE_FRONT_BIT = 0x00000001,
+    VK_STENCIL_FACE_BACK_BIT = 0x00000002,
+    VK_STENCIL_FRONT_AND_BACK = 0x00000003,
+    VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkStencilFaceFlagBits;
+typedef VkFlags VkStencilFaceFlags;
+
+typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
+    void*                                       pUserData,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
+    void*                                       pUserData,
+    void*                                       pOriginal,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkFreeFunction)(
+    void*                                       pUserData,
+    void*                                       pMemory);
+
+typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
+
+typedef struct VkApplicationInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pApplicationName;
+    uint32_t           applicationVersion;
+    const char*        pEngineName;
+    uint32_t           engineVersion;
+    uint32_t           apiVersion;
+} VkApplicationInfo;
+
+typedef struct VkInstanceCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkInstanceCreateFlags       flags;
+    const VkApplicationInfo*    pApplicationInfo;
+    uint32_t                    enabledLayerCount;
+    const char* const*          ppEnabledLayerNames;
+    uint32_t                    enabledExtensionCount;
+    const char* const*          ppEnabledExtensionNames;
+} VkInstanceCreateInfo;
+
+typedef struct VkAllocationCallbacks {
+    void*                                   pUserData;
+    PFN_vkAllocationFunction                pfnAllocation;
+    PFN_vkReallocationFunction              pfnReallocation;
+    PFN_vkFreeFunction                      pfnFree;
+    PFN_vkInternalAllocationNotification    pfnInternalAllocation;
+    PFN_vkInternalFreeNotification          pfnInternalFree;
+} VkAllocationCallbacks;
+
+typedef struct VkPhysicalDeviceFeatures {
+    VkBool32    robustBufferAccess;
+    VkBool32    fullDrawIndexUint32;
+    VkBool32    imageCubeArray;
+    VkBool32    independentBlend;
+    VkBool32    geometryShader;
+    VkBool32    tessellationShader;
+    VkBool32    sampleRateShading;
+    VkBool32    dualSrcBlend;
+    VkBool32    logicOp;
+    VkBool32    multiDrawIndirect;
+    VkBool32    drawIndirectFirstInstance;
+    VkBool32    depthClamp;
+    VkBool32    depthBiasClamp;
+    VkBool32    fillModeNonSolid;
+    VkBool32    depthBounds;
+    VkBool32    wideLines;
+    VkBool32    largePoints;
+    VkBool32    alphaToOne;
+    VkBool32    multiViewport;
+    VkBool32    samplerAnisotropy;
+    VkBool32    textureCompressionETC2;
+    VkBool32    textureCompressionASTC_LDR;
+    VkBool32    textureCompressionBC;
+    VkBool32    occlusionQueryPrecise;
+    VkBool32    pipelineStatisticsQuery;
+    VkBool32    vertexPipelineStoresAndAtomics;
+    VkBool32    fragmentStoresAndAtomics;
+    VkBool32    shaderTessellationAndGeometryPointSize;
+    VkBool32    shaderImageGatherExtended;
+    VkBool32    shaderStorageImageExtendedFormats;
+    VkBool32    shaderStorageImageMultisample;
+    VkBool32    shaderStorageImageReadWithoutFormat;
+    VkBool32    shaderStorageImageWriteWithoutFormat;
+    VkBool32    shaderUniformBufferArrayDynamicIndexing;
+    VkBool32    shaderSampledImageArrayDynamicIndexing;
+    VkBool32    shaderStorageBufferArrayDynamicIndexing;
+    VkBool32    shaderStorageImageArrayDynamicIndexing;
+    VkBool32    shaderClipDistance;
+    VkBool32    shaderCullDistance;
+    VkBool32    shaderFloat64;
+    VkBool32    shaderInt64;
+    VkBool32    shaderInt16;
+    VkBool32    shaderResourceResidency;
+    VkBool32    shaderResourceMinLod;
+    VkBool32    sparseBinding;
+    VkBool32    sparseResidencyBuffer;
+    VkBool32    sparseResidencyImage2D;
+    VkBool32    sparseResidencyImage3D;
+    VkBool32    sparseResidency2Samples;
+    VkBool32    sparseResidency4Samples;
+    VkBool32    sparseResidency8Samples;
+    VkBool32    sparseResidency16Samples;
+    VkBool32    sparseResidencyAliased;
+    VkBool32    variableMultisampleRate;
+    VkBool32    inheritedQueries;
+} VkPhysicalDeviceFeatures;
+
+typedef struct VkFormatProperties {
+    VkFormatFeatureFlags    linearTilingFeatures;
+    VkFormatFeatureFlags    optimalTilingFeatures;
+    VkFormatFeatureFlags    bufferFeatures;
+} VkFormatProperties;
+
+typedef struct VkExtent3D {
+    uint32_t    width;
+    uint32_t    height;
+    uint32_t    depth;
+} VkExtent3D;
+
+typedef struct VkImageFormatProperties {
+    VkExtent3D            maxExtent;
+    uint32_t              maxMipLevels;
+    uint32_t              maxArrayLayers;
+    VkSampleCountFlags    sampleCounts;
+    VkDeviceSize          maxResourceSize;
+} VkImageFormatProperties;
+
+typedef struct VkPhysicalDeviceLimits {
+    uint32_t              maxImageDimension1D;
+    uint32_t              maxImageDimension2D;
+    uint32_t              maxImageDimension3D;
+    uint32_t              maxImageDimensionCube;
+    uint32_t              maxImageArrayLayers;
+    uint32_t              maxTexelBufferElements;
+    uint32_t              maxUniformBufferRange;
+    uint32_t              maxStorageBufferRange;
+    uint32_t              maxPushConstantsSize;
+    uint32_t              maxMemoryAllocationCount;
+    uint32_t              maxSamplerAllocationCount;
+    VkDeviceSize          bufferImageGranularity;
+    VkDeviceSize          sparseAddressSpaceSize;
+    uint32_t              maxBoundDescriptorSets;
+    uint32_t              maxPerStageDescriptorSamplers;
+    uint32_t              maxPerStageDescriptorUniformBuffers;
+    uint32_t              maxPerStageDescriptorStorageBuffers;
+    uint32_t              maxPerStageDescriptorSampledImages;
+    uint32_t              maxPerStageDescriptorStorageImages;
+    uint32_t              maxPerStageDescriptorInputAttachments;
+    uint32_t              maxPerStageResources;
+    uint32_t              maxDescriptorSetSamplers;
+    uint32_t              maxDescriptorSetUniformBuffers;
+    uint32_t              maxDescriptorSetUniformBuffersDynamic;
+    uint32_t              maxDescriptorSetStorageBuffers;
+    uint32_t              maxDescriptorSetStorageBuffersDynamic;
+    uint32_t              maxDescriptorSetSampledImages;
+    uint32_t              maxDescriptorSetStorageImages;
+    uint32_t              maxDescriptorSetInputAttachments;
+    uint32_t              maxVertexInputAttributes;
+    uint32_t              maxVertexInputBindings;
+    uint32_t              maxVertexInputAttributeOffset;
+    uint32_t              maxVertexInputBindingStride;
+    uint32_t              maxVertexOutputComponents;
+    uint32_t              maxTessellationGenerationLevel;
+    uint32_t              maxTessellationPatchSize;
+    uint32_t              maxTessellationControlPerVertexInputComponents;
+    uint32_t              maxTessellationControlPerVertexOutputComponents;
+    uint32_t              maxTessellationControlPerPatchOutputComponents;
+    uint32_t              maxTessellationControlTotalOutputComponents;
+    uint32_t              maxTessellationEvaluationInputComponents;
+    uint32_t              maxTessellationEvaluationOutputComponents;
+    uint32_t              maxGeometryShaderInvocations;
+    uint32_t              maxGeometryInputComponents;
+    uint32_t              maxGeometryOutputComponents;
+    uint32_t              maxGeometryOutputVertices;
+    uint32_t              maxGeometryTotalOutputComponents;
+    uint32_t              maxFragmentInputComponents;
+    uint32_t              maxFragmentOutputAttachments;
+    uint32_t              maxFragmentDualSrcAttachments;
+    uint32_t              maxFragmentCombinedOutputResources;
+    uint32_t              maxComputeSharedMemorySize;
+    uint32_t              maxComputeWorkGroupCount[3];
+    uint32_t              maxComputeWorkGroupInvocations;
+    uint32_t              maxComputeWorkGroupSize[3];
+    uint32_t              subPixelPrecisionBits;
+    uint32_t              subTexelPrecisionBits;
+    uint32_t              mipmapPrecisionBits;
+    uint32_t              maxDrawIndexedIndexValue;
+    uint32_t              maxDrawIndirectCount;
+    float                 maxSamplerLodBias;
+    float                 maxSamplerAnisotropy;
+    uint32_t              maxViewports;
+    uint32_t              maxViewportDimensions[2];
+    float                 viewportBoundsRange[2];
+    uint32_t              viewportSubPixelBits;
+    size_t                minMemoryMapAlignment;
+    VkDeviceSize          minTexelBufferOffsetAlignment;
+    VkDeviceSize          minUniformBufferOffsetAlignment;
+    VkDeviceSize          minStorageBufferOffsetAlignment;
+    int32_t               minTexelOffset;
+    uint32_t              maxTexelOffset;
+    int32_t               minTexelGatherOffset;
+    uint32_t              maxTexelGatherOffset;
+    float                 minInterpolationOffset;
+    float                 maxInterpolationOffset;
+    uint32_t              subPixelInterpolationOffsetBits;
+    uint32_t              maxFramebufferWidth;
+    uint32_t              maxFramebufferHeight;
+    uint32_t              maxFramebufferLayers;
+    VkSampleCountFlags    framebufferColorSampleCounts;
+    VkSampleCountFlags    framebufferDepthSampleCounts;
+    VkSampleCountFlags    framebufferStencilSampleCounts;
+    VkSampleCountFlags    framebufferNoAttachmentsSampleCounts;
+    uint32_t              maxColorAttachments;
+    VkSampleCountFlags    sampledImageColorSampleCounts;
+    VkSampleCountFlags    sampledImageIntegerSampleCounts;
+    VkSampleCountFlags    sampledImageDepthSampleCounts;
+    VkSampleCountFlags    sampledImageStencilSampleCounts;
+    VkSampleCountFlags    storageImageSampleCounts;
+    uint32_t              maxSampleMaskWords;
+    VkBool32              timestampComputeAndGraphics;
+    float                 timestampPeriod;
+    uint32_t              maxClipDistances;
+    uint32_t              maxCullDistances;
+    uint32_t              maxCombinedClipAndCullDistances;
+    uint32_t              discreteQueuePriorities;
+    float                 pointSizeRange[2];
+    float                 lineWidthRange[2];
+    float                 pointSizeGranularity;
+    float                 lineWidthGranularity;
+    VkBool32              strictLines;
+    VkBool32              standardSampleLocations;
+    VkDeviceSize          optimalBufferCopyOffsetAlignment;
+    VkDeviceSize          optimalBufferCopyRowPitchAlignment;
+    VkDeviceSize          nonCoherentAtomSize;
+} VkPhysicalDeviceLimits;
+
+typedef struct VkPhysicalDeviceSparseProperties {
+    VkBool32    residencyStandard2DBlockShape;
+    VkBool32    residencyStandard2DMultisampleBlockShape;
+    VkBool32    residencyStandard3DBlockShape;
+    VkBool32    residencyAlignedMipSize;
+    VkBool32    residencyNonResidentStrict;
+} VkPhysicalDeviceSparseProperties;
+
+typedef struct VkPhysicalDeviceProperties {
+    uint32_t                            apiVersion;
+    uint32_t                            driverVersion;
+    uint32_t                            vendorID;
+    uint32_t                            deviceID;
+    VkPhysicalDeviceType                deviceType;
+    char                                deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+    uint8_t                             pipelineCacheUUID[VK_UUID_SIZE];
+    VkPhysicalDeviceLimits              limits;
+    VkPhysicalDeviceSparseProperties    sparseProperties;
+} VkPhysicalDeviceProperties;
+
+typedef struct VkQueueFamilyProperties {
+    VkQueueFlags    queueFlags;
+    uint32_t        queueCount;
+    uint32_t        timestampValidBits;
+    VkExtent3D      minImageTransferGranularity;
+} VkQueueFamilyProperties;
+
+typedef struct VkMemoryType {
+    VkMemoryPropertyFlags    propertyFlags;
+    uint32_t                 heapIndex;
+} VkMemoryType;
+
+typedef struct VkMemoryHeap {
+    VkDeviceSize         size;
+    VkMemoryHeapFlags    flags;
+} VkMemoryHeap;
+
+typedef struct VkPhysicalDeviceMemoryProperties {
+    uint32_t        memoryTypeCount;
+    VkMemoryType    memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t        memoryHeapCount;
+    VkMemoryHeap    memoryHeaps[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryProperties;
+
+typedef struct VkDeviceQueueCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueCount;
+    const float*                pQueuePriorities;
+} VkDeviceQueueCreateInfo;
+
+typedef struct VkDeviceCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkDeviceCreateFlags                flags;
+    uint32_t                           queueCreateInfoCount;
+    const VkDeviceQueueCreateInfo*     pQueueCreateInfos;
+    uint32_t                           enabledLayerCount;
+    const char* const*                 ppEnabledLayerNames;
+    uint32_t                           enabledExtensionCount;
+    const char* const*                 ppEnabledExtensionNames;
+    const VkPhysicalDeviceFeatures*    pEnabledFeatures;
+} VkDeviceCreateInfo;
+
+typedef struct VkExtensionProperties {
+    char        extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+} VkExtensionProperties;
+
+typedef struct VkLayerProperties {
+    char        layerName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+    uint32_t    implementationVersion;
+    char        description[VK_MAX_DESCRIPTION_SIZE];
+} VkLayerProperties;
+
+typedef struct VkSubmitInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       waitSemaphoreCount;
+    const VkSemaphore*             pWaitSemaphores;
+    const VkPipelineStageFlags*    pWaitDstStageMask;
+    uint32_t                       commandBufferCount;
+    const VkCommandBuffer*         pCommandBuffers;
+    uint32_t                       signalSemaphoreCount;
+    const VkSemaphore*             pSignalSemaphores;
+} VkSubmitInfo;
+
+typedef struct VkMemoryAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeIndex;
+} VkMemoryAllocateInfo;
+
+typedef struct VkMappedMemoryRange {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkMappedMemoryRange;
+
+typedef struct VkMemoryRequirements {
+    VkDeviceSize    size;
+    VkDeviceSize    alignment;
+    uint32_t        memoryTypeBits;
+} VkMemoryRequirements;
+
+typedef struct VkSparseImageFormatProperties {
+    VkImageAspectFlags          aspectMask;
+    VkExtent3D                  imageGranularity;
+    VkSparseImageFormatFlags    flags;
+} VkSparseImageFormatProperties;
+
+typedef struct VkSparseImageMemoryRequirements {
+    VkSparseImageFormatProperties    formatProperties;
+    uint32_t                         imageMipTailFirstLod;
+    VkDeviceSize                     imageMipTailSize;
+    VkDeviceSize                     imageMipTailOffset;
+    VkDeviceSize                     imageMipTailStride;
+} VkSparseImageMemoryRequirements;
+
+typedef struct VkSparseMemoryBind {
+    VkDeviceSize               resourceOffset;
+    VkDeviceSize               size;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseMemoryBind;
+
+typedef struct VkSparseBufferMemoryBindInfo {
+    VkBuffer                     buffer;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseBufferMemoryBindInfo;
+
+typedef struct VkSparseImageOpaqueMemoryBindInfo {
+    VkImage                      image;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseImageOpaqueMemoryBindInfo;
+
+typedef struct VkImageSubresource {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              arrayLayer;
+} VkImageSubresource;
+
+typedef struct VkOffset3D {
+    int32_t    x;
+    int32_t    y;
+    int32_t    z;
+} VkOffset3D;
+
+typedef struct VkSparseImageMemoryBind {
+    VkImageSubresource         subresource;
+    VkOffset3D                 offset;
+    VkExtent3D                 extent;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseImageMemoryBind;
+
+typedef struct VkSparseImageMemoryBindInfo {
+    VkImage                           image;
+    uint32_t                          bindCount;
+    const VkSparseImageMemoryBind*    pBinds;
+} VkSparseImageMemoryBindInfo;
+
+typedef struct VkBindSparseInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    waitSemaphoreCount;
+    const VkSemaphore*                          pWaitSemaphores;
+    uint32_t                                    bufferBindCount;
+    const VkSparseBufferMemoryBindInfo*         pBufferBinds;
+    uint32_t                                    imageOpaqueBindCount;
+    const VkSparseImageOpaqueMemoryBindInfo*    pImageOpaqueBinds;
+    uint32_t                                    imageBindCount;
+    const VkSparseImageMemoryBindInfo*          pImageBinds;
+    uint32_t                                    signalSemaphoreCount;
+    const VkSemaphore*                          pSignalSemaphores;
+} VkBindSparseInfo;
+
+typedef struct VkFenceCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFenceCreateFlags    flags;
+} VkFenceCreateInfo;
+
+typedef struct VkSemaphoreCreateInfo {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkSemaphoreCreateFlags    flags;
+} VkSemaphoreCreateInfo;
+
+typedef struct VkEventCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkEventCreateFlags    flags;
+} VkEventCreateInfo;
+
+typedef struct VkQueryPoolCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkQueryPoolCreateFlags           flags;
+    VkQueryType                      queryType;
+    uint32_t                         queryCount;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkQueryPoolCreateInfo;
+
+typedef struct VkBufferCreateInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkBufferCreateFlags    flags;
+    VkDeviceSize           size;
+    VkBufferUsageFlags     usage;
+    VkSharingMode          sharingMode;
+    uint32_t               queueFamilyIndexCount;
+    const uint32_t*        pQueueFamilyIndices;
+} VkBufferCreateInfo;
+
+typedef struct VkBufferViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkBufferViewCreateFlags    flags;
+    VkBuffer                   buffer;
+    VkFormat                   format;
+    VkDeviceSize               offset;
+    VkDeviceSize               range;
+} VkBufferViewCreateInfo;
+
+typedef struct VkImageCreateInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageCreateFlags       flags;
+    VkImageType              imageType;
+    VkFormat                 format;
+    VkExtent3D               extent;
+    uint32_t                 mipLevels;
+    uint32_t                 arrayLayers;
+    VkSampleCountFlagBits    samples;
+    VkImageTiling            tiling;
+    VkImageUsageFlags        usage;
+    VkSharingMode            sharingMode;
+    uint32_t                 queueFamilyIndexCount;
+    const uint32_t*          pQueueFamilyIndices;
+    VkImageLayout            initialLayout;
+} VkImageCreateInfo;
+
+typedef struct VkSubresourceLayout {
+    VkDeviceSize    offset;
+    VkDeviceSize    size;
+    VkDeviceSize    rowPitch;
+    VkDeviceSize    arrayPitch;
+    VkDeviceSize    depthPitch;
+} VkSubresourceLayout;
+
+typedef struct VkComponentMapping {
+    VkComponentSwizzle    r;
+    VkComponentSwizzle    g;
+    VkComponentSwizzle    b;
+    VkComponentSwizzle    a;
+} VkComponentMapping;
+
+typedef struct VkImageSubresourceRange {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              baseMipLevel;
+    uint32_t              levelCount;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceRange;
+
+typedef struct VkImageViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkImageViewCreateFlags     flags;
+    VkImage                    image;
+    VkImageViewType            viewType;
+    VkFormat                   format;
+    VkComponentMapping         components;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageViewCreateInfo;
+
+typedef struct VkShaderModuleCreateInfo {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkShaderModuleCreateFlags    flags;
+    size_t                       codeSize;
+    const uint32_t*              pCode;
+} VkShaderModuleCreateInfo;
+
+typedef struct VkPipelineCacheCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkPipelineCacheCreateFlags    flags;
+    size_t                        initialDataSize;
+    const void*                   pInitialData;
+} VkPipelineCacheCreateInfo;
+
+typedef struct VkSpecializationMapEntry {
+    uint32_t    constantID;
+    uint32_t    offset;
+    size_t      size;
+} VkSpecializationMapEntry;
+
+typedef struct VkSpecializationInfo {
+    uint32_t                           mapEntryCount;
+    const VkSpecializationMapEntry*    pMapEntries;
+    size_t                             dataSize;
+    const void*                        pData;
+} VkSpecializationInfo;
+
+typedef struct VkPipelineShaderStageCreateInfo {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkPipelineShaderStageCreateFlags    flags;
+    VkShaderStageFlagBits               stage;
+    VkShaderModule                      module;
+    const char*                         pName;
+    const VkSpecializationInfo*         pSpecializationInfo;
+} VkPipelineShaderStageCreateInfo;
+
+typedef struct VkVertexInputBindingDescription {
+    uint32_t             binding;
+    uint32_t             stride;
+    VkVertexInputRate    inputRate;
+} VkVertexInputBindingDescription;
+
+typedef struct VkVertexInputAttributeDescription {
+    uint32_t    location;
+    uint32_t    binding;
+    VkFormat    format;
+    uint32_t    offset;
+} VkVertexInputAttributeDescription;
+
+typedef struct VkPipelineVertexInputStateCreateInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPipelineVertexInputStateCreateFlags       flags;
+    uint32_t                                    vertexBindingDescriptionCount;
+    const VkVertexInputBindingDescription*      pVertexBindingDescriptions;
+    uint32_t                                    vertexAttributeDescriptionCount;
+    const VkVertexInputAttributeDescription*    pVertexAttributeDescriptions;
+} VkPipelineVertexInputStateCreateInfo;
+
+typedef struct VkPipelineInputAssemblyStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineInputAssemblyStateCreateFlags    flags;
+    VkPrimitiveTopology                        topology;
+    VkBool32                                   primitiveRestartEnable;
+} VkPipelineInputAssemblyStateCreateInfo;
+
+typedef struct VkPipelineTessellationStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineTessellationStateCreateFlags    flags;
+    uint32_t                                  patchControlPoints;
+} VkPipelineTessellationStateCreateInfo;
+
+typedef struct VkViewport {
+    float    x;
+    float    y;
+    float    width;
+    float    height;
+    float    minDepth;
+    float    maxDepth;
+} VkViewport;
+
+typedef struct VkOffset2D {
+    int32_t    x;
+    int32_t    y;
+} VkOffset2D;
+
+typedef struct VkExtent2D {
+    uint32_t    width;
+    uint32_t    height;
+} VkExtent2D;
+
+typedef struct VkRect2D {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+} VkRect2D;
+
+typedef struct VkPipelineViewportStateCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkPipelineViewportStateCreateFlags    flags;
+    uint32_t                              viewportCount;
+    const VkViewport*                     pViewports;
+    uint32_t                              scissorCount;
+    const VkRect2D*                       pScissors;
+} VkPipelineViewportStateCreateInfo;
+
+typedef struct VkPipelineRasterizationStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineRasterizationStateCreateFlags    flags;
+    VkBool32                                   depthClampEnable;
+    VkBool32                                   rasterizerDiscardEnable;
+    VkPolygonMode                              polygonMode;
+    VkCullModeFlags                            cullMode;
+    VkFrontFace                                frontFace;
+    VkBool32                                   depthBiasEnable;
+    float                                      depthBiasConstantFactor;
+    float                                      depthBiasClamp;
+    float                                      depthBiasSlopeFactor;
+    float                                      lineWidth;
+} VkPipelineRasterizationStateCreateInfo;
+
+typedef struct VkPipelineMultisampleStateCreateInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkPipelineMultisampleStateCreateFlags    flags;
+    VkSampleCountFlagBits                    rasterizationSamples;
+    VkBool32                                 sampleShadingEnable;
+    float                                    minSampleShading;
+    const VkSampleMask*                      pSampleMask;
+    VkBool32                                 alphaToCoverageEnable;
+    VkBool32                                 alphaToOneEnable;
+} VkPipelineMultisampleStateCreateInfo;
+
+typedef struct VkStencilOpState {
+    VkStencilOp    failOp;
+    VkStencilOp    passOp;
+    VkStencilOp    depthFailOp;
+    VkCompareOp    compareOp;
+    uint32_t       compareMask;
+    uint32_t       writeMask;
+    uint32_t       reference;
+} VkStencilOpState;
+
+typedef struct VkPipelineDepthStencilStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineDepthStencilStateCreateFlags    flags;
+    VkBool32                                  depthTestEnable;
+    VkBool32                                  depthWriteEnable;
+    VkCompareOp                               depthCompareOp;
+    VkBool32                                  depthBoundsTestEnable;
+    VkBool32                                  stencilTestEnable;
+    VkStencilOpState                          front;
+    VkStencilOpState                          back;
+    float                                     minDepthBounds;
+    float                                     maxDepthBounds;
+} VkPipelineDepthStencilStateCreateInfo;
+
+typedef struct VkPipelineColorBlendAttachmentState {
+    VkBool32                 blendEnable;
+    VkBlendFactor            srcColorBlendFactor;
+    VkBlendFactor            dstColorBlendFactor;
+    VkBlendOp                colorBlendOp;
+    VkBlendFactor            srcAlphaBlendFactor;
+    VkBlendFactor            dstAlphaBlendFactor;
+    VkBlendOp                alphaBlendOp;
+    VkColorComponentFlags    colorWriteMask;
+} VkPipelineColorBlendAttachmentState;
+
+typedef struct VkPipelineColorBlendStateCreateInfo {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineColorBlendStateCreateFlags          flags;
+    VkBool32                                      logicOpEnable;
+    VkLogicOp                                     logicOp;
+    uint32_t                                      attachmentCount;
+    const VkPipelineColorBlendAttachmentState*    pAttachments;
+    float                                         blendConstants[4];
+} VkPipelineColorBlendStateCreateInfo;
+
+typedef struct VkPipelineDynamicStateCreateInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineDynamicStateCreateFlags    flags;
+    uint32_t                             dynamicStateCount;
+    const VkDynamicState*                pDynamicStates;
+} VkPipelineDynamicStateCreateInfo;
+
+typedef struct VkGraphicsPipelineCreateInfo {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCreateFlags                            flags;
+    uint32_t                                         stageCount;
+    const VkPipelineShaderStageCreateInfo*           pStages;
+    const VkPipelineVertexInputStateCreateInfo*      pVertexInputState;
+    const VkPipelineInputAssemblyStateCreateInfo*    pInputAssemblyState;
+    const VkPipelineTessellationStateCreateInfo*     pTessellationState;
+    const VkPipelineViewportStateCreateInfo*         pViewportState;
+    const VkPipelineRasterizationStateCreateInfo*    pRasterizationState;
+    const VkPipelineMultisampleStateCreateInfo*      pMultisampleState;
+    const VkPipelineDepthStencilStateCreateInfo*     pDepthStencilState;
+    const VkPipelineColorBlendStateCreateInfo*       pColorBlendState;
+    const VkPipelineDynamicStateCreateInfo*          pDynamicState;
+    VkPipelineLayout                                 layout;
+    VkRenderPass                                     renderPass;
+    uint32_t                                         subpass;
+    VkPipeline                                       basePipelineHandle;
+    int32_t                                          basePipelineIndex;
+} VkGraphicsPipelineCreateInfo;
+
+typedef struct VkComputePipelineCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkPipelineCreateFlags              flags;
+    VkPipelineShaderStageCreateInfo    stage;
+    VkPipelineLayout                   layout;
+    VkPipeline                         basePipelineHandle;
+    int32_t                            basePipelineIndex;
+} VkComputePipelineCreateInfo;
+
+typedef struct VkPushConstantRange {
+    VkShaderStageFlags    stageFlags;
+    uint32_t              offset;
+    uint32_t              size;
+} VkPushConstantRange;
+
+typedef struct VkPipelineLayoutCreateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkPipelineLayoutCreateFlags     flags;
+    uint32_t                        setLayoutCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+    uint32_t                        pushConstantRangeCount;
+    const VkPushConstantRange*      pPushConstantRanges;
+} VkPipelineLayoutCreateInfo;
+
+typedef struct VkSamplerCreateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkSamplerCreateFlags    flags;
+    VkFilter                magFilter;
+    VkFilter                minFilter;
+    VkSamplerMipmapMode     mipmapMode;
+    VkSamplerAddressMode    addressModeU;
+    VkSamplerAddressMode    addressModeV;
+    VkSamplerAddressMode    addressModeW;
+    float                   mipLodBias;
+    VkBool32                anisotropyEnable;
+    float                   maxAnisotropy;
+    VkBool32                compareEnable;
+    VkCompareOp             compareOp;
+    float                   minLod;
+    float                   maxLod;
+    VkBorderColor           borderColor;
+    VkBool32                unnormalizedCoordinates;
+} VkSamplerCreateInfo;
+
+typedef struct VkDescriptorSetLayoutBinding {
+    uint32_t              binding;
+    VkDescriptorType      descriptorType;
+    uint32_t              descriptorCount;
+    VkShaderStageFlags    stageFlags;
+    const VkSampler*      pImmutableSamplers;
+} VkDescriptorSetLayoutBinding;
+
+typedef struct VkDescriptorSetLayoutCreateInfo {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkDescriptorSetLayoutCreateFlags       flags;
+    uint32_t                               bindingCount;
+    const VkDescriptorSetLayoutBinding*    pBindings;
+} VkDescriptorSetLayoutCreateInfo;
+
+typedef struct VkDescriptorPoolSize {
+    VkDescriptorType    type;
+    uint32_t            descriptorCount;
+} VkDescriptorPoolSize;
+
+typedef struct VkDescriptorPoolCreateInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDescriptorPoolCreateFlags    flags;
+    uint32_t                       maxSets;
+    uint32_t                       poolSizeCount;
+    const VkDescriptorPoolSize*    pPoolSizes;
+} VkDescriptorPoolCreateInfo;
+
+typedef struct VkDescriptorSetAllocateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDescriptorPool                descriptorPool;
+    uint32_t                        descriptorSetCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+} VkDescriptorSetAllocateInfo;
+
+typedef struct VkDescriptorImageInfo {
+    VkSampler        sampler;
+    VkImageView      imageView;
+    VkImageLayout    imageLayout;
+} VkDescriptorImageInfo;
+
+typedef struct VkDescriptorBufferInfo {
+    VkBuffer        buffer;
+    VkDeviceSize    offset;
+    VkDeviceSize    range;
+} VkDescriptorBufferInfo;
+
+typedef struct VkWriteDescriptorSet {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDescriptorSet                  dstSet;
+    uint32_t                         dstBinding;
+    uint32_t                         dstArrayElement;
+    uint32_t                         descriptorCount;
+    VkDescriptorType                 descriptorType;
+    const VkDescriptorImageInfo*     pImageInfo;
+    const VkDescriptorBufferInfo*    pBufferInfo;
+    const VkBufferView*              pTexelBufferView;
+} VkWriteDescriptorSet;
+
+typedef struct VkCopyDescriptorSet {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDescriptorSet    srcSet;
+    uint32_t           srcBinding;
+    uint32_t           srcArrayElement;
+    VkDescriptorSet    dstSet;
+    uint32_t           dstBinding;
+    uint32_t           dstArrayElement;
+    uint32_t           descriptorCount;
+} VkCopyDescriptorSet;
+
+typedef struct VkFramebufferCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkFramebufferCreateFlags    flags;
+    VkRenderPass                renderPass;
+    uint32_t                    attachmentCount;
+    const VkImageView*          pAttachments;
+    uint32_t                    width;
+    uint32_t                    height;
+    uint32_t                    layers;
+} VkFramebufferCreateInfo;
+
+typedef struct VkAttachmentDescription {
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription;
+
+typedef struct VkAttachmentReference {
+    uint32_t         attachment;
+    VkImageLayout    layout;
+} VkAttachmentReference;
+
+typedef struct VkSubpassDescription {
+    VkSubpassDescriptionFlags       flags;
+    VkPipelineBindPoint             pipelineBindPoint;
+    uint32_t                        inputAttachmentCount;
+    const VkAttachmentReference*    pInputAttachments;
+    uint32_t                        colorAttachmentCount;
+    const VkAttachmentReference*    pColorAttachments;
+    const VkAttachmentReference*    pResolveAttachments;
+    const VkAttachmentReference*    pDepthStencilAttachment;
+    uint32_t                        preserveAttachmentCount;
+    const uint32_t*                 pPreserveAttachments;
+} VkSubpassDescription;
+
+typedef struct VkSubpassDependency {
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+} VkSubpassDependency;
+
+typedef struct VkRenderPassCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRenderPassCreateFlags           flags;
+    uint32_t                          attachmentCount;
+    const VkAttachmentDescription*    pAttachments;
+    uint32_t                          subpassCount;
+    const VkSubpassDescription*       pSubpasses;
+    uint32_t                          dependencyCount;
+    const VkSubpassDependency*        pDependencies;
+} VkRenderPassCreateInfo;
+
+typedef struct VkCommandPoolCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkCommandPoolCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+} VkCommandPoolCreateInfo;
+
+typedef struct VkCommandBufferAllocateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkCommandPool           commandPool;
+    VkCommandBufferLevel    level;
+    uint32_t                commandBufferCount;
+} VkCommandBufferAllocateInfo;
+
+typedef struct VkCommandBufferInheritanceInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkRenderPass                     renderPass;
+    uint32_t                         subpass;
+    VkFramebuffer                    framebuffer;
+    VkBool32                         occlusionQueryEnable;
+    VkQueryControlFlags              queryFlags;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkCommandBufferInheritanceInfo;
+
+typedef struct VkCommandBufferBeginInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkCommandBufferUsageFlags                flags;
+    const VkCommandBufferInheritanceInfo*    pInheritanceInfo;
+} VkCommandBufferBeginInfo;
+
+typedef struct VkBufferCopy {
+    VkDeviceSize    srcOffset;
+    VkDeviceSize    dstOffset;
+    VkDeviceSize    size;
+} VkBufferCopy;
+
+typedef struct VkImageSubresourceLayers {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceLayers;
+
+typedef struct VkImageCopy {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageCopy;
+
+typedef struct VkImageBlit {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffsets[2];
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffsets[2];
+} VkImageBlit;
+
+typedef struct VkBufferImageCopy {
+    VkDeviceSize                bufferOffset;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkBufferImageCopy;
+
+typedef union VkClearColorValue {
+    float       float32[4];
+    int32_t     int32[4];
+    uint32_t    uint32[4];
+} VkClearColorValue;
+
+typedef struct VkClearDepthStencilValue {
+    float       depth;
+    uint32_t    stencil;
+} VkClearDepthStencilValue;
+
+typedef union VkClearValue {
+    VkClearColorValue           color;
+    VkClearDepthStencilValue    depthStencil;
+} VkClearValue;
+
+typedef struct VkClearAttachment {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              colorAttachment;
+    VkClearValue          clearValue;
+} VkClearAttachment;
+
+typedef struct VkClearRect {
+    VkRect2D    rect;
+    uint32_t    baseArrayLayer;
+    uint32_t    layerCount;
+} VkClearRect;
+
+typedef struct VkImageResolve {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageResolve;
+
+typedef struct VkMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+} VkMemoryBarrier;
+
+typedef struct VkBufferMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+    uint32_t           srcQueueFamilyIndex;
+    uint32_t           dstQueueFamilyIndex;
+    VkBuffer           buffer;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkBufferMemoryBarrier;
+
+typedef struct VkImageMemoryBarrier {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkAccessFlags              srcAccessMask;
+    VkAccessFlags              dstAccessMask;
+    VkImageLayout              oldLayout;
+    VkImageLayout              newLayout;
+    uint32_t                   srcQueueFamilyIndex;
+    uint32_t                   dstQueueFamilyIndex;
+    VkImage                    image;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageMemoryBarrier;
+
+typedef struct VkRenderPassBeginInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkRenderPass           renderPass;
+    VkFramebuffer          framebuffer;
+    VkRect2D               renderArea;
+    uint32_t               clearValueCount;
+    const VkClearValue*    pClearValues;
+} VkRenderPassBeginInfo;
+
+typedef struct VkDispatchIndirectCommand {
+    uint32_t    x;
+    uint32_t    y;
+    uint32_t    z;
+} VkDispatchIndirectCommand;
+
+typedef struct VkDrawIndexedIndirectCommand {
+    uint32_t    indexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstIndex;
+    int32_t     vertexOffset;
+    uint32_t    firstInstance;
+} VkDrawIndexedIndirectCommand;
+
+typedef struct VkDrawIndirectCommand {
+    uint32_t    vertexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstVertex;
+    uint32_t    firstInstance;
+} VkDrawIndirectCommand;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
+typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
+typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue);
+typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
+typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
+typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
+typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
+typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
+typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
+typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
+typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
+typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets);
+typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers);
+typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor);
+typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data);
+typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
+    VkQueue                                     queue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+#endif
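+
+/*
+ * Editor's note: illustrative sketch only, not part of the Khronos header.
+ * The command buffer entry points declared above are typically driven along
+ * these lines. All handles (cmd, renderPass, framebuffer, extent, pipeline)
+ * are placeholders assumed to have been created beforehand; error handling
+ * and synchronization are omitted.
+ *
+ *   VkCommandBufferBeginInfo beginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
+ *   vkBeginCommandBuffer(cmd, &beginInfo);
+ *
+ *   VkRenderPassBeginInfo rpInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
+ *   rpInfo.renderPass        = renderPass;
+ *   rpInfo.framebuffer       = framebuffer;
+ *   rpInfo.renderArea.extent = extent;
+ *   vkCmdBeginRenderPass(cmd, &rpInfo, VK_SUBPASS_CONTENTS_INLINE);
+ *   vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
+ *   vkCmdDraw(cmd, 3, 1, 0, 0);
+ *   vkCmdEndRenderPass(cmd);
+ *
+ *   vkEndCommandBuffer(cmd);
+ */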
+
+#define VK_KHR_surface 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR)
+
+#define VK_KHR_SURFACE_SPEC_VERSION       25
+#define VK_KHR_SURFACE_EXTENSION_NAME     "VK_KHR_surface"
+#define VK_COLORSPACE_SRGB_NONLINEAR_KHR  VK_COLOR_SPACE_SRGB_NONLINEAR_KHR
+
+
+typedef enum VkColorSpaceKHR {
+    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0,
+    VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001,
+    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002,
+    VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003,
+    VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004,
+    VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005,
+    VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006,
+    VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007,
+    VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008,
+    VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009,
+    VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010,
+    VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011,
+    VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012,
+    VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013,
+    VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014,
+    VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1),
+    VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkColorSpaceKHR;
+
+typedef enum VkPresentModeKHR {
+    VK_PRESENT_MODE_IMMEDIATE_KHR = 0,
+    VK_PRESENT_MODE_MAILBOX_KHR = 1,
+    VK_PRESENT_MODE_FIFO_KHR = 2,
+    VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3,
+    VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000,
+    VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001,
+    VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1),
+    VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPresentModeKHR;
+
+
+typedef enum VkSurfaceTransformFlagBitsKHR {
+    VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001,
+    VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002,
+    VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004,
+    VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080,
+    VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100,
+    VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSurfaceTransformFlagBitsKHR;
+typedef VkFlags VkSurfaceTransformFlagsKHR;
+
+typedef enum VkCompositeAlphaFlagBitsKHR {
+    VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002,
+    VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004,
+    VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008,
+    VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCompositeAlphaFlagBitsKHR;
+typedef VkFlags VkCompositeAlphaFlagsKHR;
+
+typedef struct VkSurfaceCapabilitiesKHR {
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+} VkSurfaceCapabilitiesKHR;
+
+typedef struct VkSurfaceFormatKHR {
+    VkFormat           format;
+    VkColorSpaceKHR    colorSpace;
+} VkSurfaceFormatKHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+#endif
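+
+/*
+ * Editor's note: illustrative sketch only, not part of the Khronos header.
+ * The VK_KHR_surface queries above follow the usual enumerate-then-allocate
+ * pattern; physicalDevice, queueFamilyIndex and surface are placeholder
+ * handles assumed to exist, and error handling is omitted.
+ *
+ *   VkBool32 supported = VK_FALSE;
+ *   vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, &supported);
+ *
+ *   VkSurfaceCapabilitiesKHR caps;
+ *   vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, &caps);
+ *
+ *   uint32_t formatCount = 0;
+ *   vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, NULL);
+ *   ... allocate formatCount VkSurfaceFormatKHR entries, then call again to fill them ...
+ */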
+
+#define VK_KHR_swapchain 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR)
+
+#define VK_KHR_SWAPCHAIN_SPEC_VERSION     68
+#define VK_KHR_SWAPCHAIN_EXTENSION_NAME   "VK_KHR_swapchain"
+
+
+typedef enum VkSwapchainCreateFlagBitsKHR {
+    VK_SWAPCHAIN_CREATE_BIND_SFR_BIT_KHX = 0x00000001,
+    VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSwapchainCreateFlagBitsKHR;
+typedef VkFlags VkSwapchainCreateFlagsKHR;
+
+typedef struct VkSwapchainCreateInfoKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSwapchainCreateFlagsKHR        flags;
+    VkSurfaceKHR                     surface;
+    uint32_t                         minImageCount;
+    VkFormat                         imageFormat;
+    VkColorSpaceKHR                  imageColorSpace;
+    VkExtent2D                       imageExtent;
+    uint32_t                         imageArrayLayers;
+    VkImageUsageFlags                imageUsage;
+    VkSharingMode                    imageSharingMode;
+    uint32_t                         queueFamilyIndexCount;
+    const uint32_t*                  pQueueFamilyIndices;
+    VkSurfaceTransformFlagBitsKHR    preTransform;
+    VkCompositeAlphaFlagBitsKHR      compositeAlpha;
+    VkPresentModeKHR                 presentMode;
+    VkBool32                         clipped;
+    VkSwapchainKHR                   oldSwapchain;
+} VkSwapchainCreateInfoKHR;
+
+typedef struct VkPresentInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 waitSemaphoreCount;
+    const VkSemaphore*       pWaitSemaphores;
+    uint32_t                 swapchainCount;
+    const VkSwapchainKHR*    pSwapchains;
+    const uint32_t*          pImageIndices;
+    VkResult*                pResults;
+} VkPresentInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain);
+typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+#endif
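+
+/*
+ * Editor's note: illustrative sketch only, not part of the Khronos header.
+ * A typical per-frame use of the VK_KHR_swapchain entry points declared
+ * above; device, swapchain, semaphores and presentQueue are placeholder
+ * handles, and queue submission, synchronization and error handling are
+ * omitted.
+ *
+ *   uint32_t imageIndex = 0;
+ *   vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquireSemaphore, VK_NULL_HANDLE, &imageIndex);
+ *
+ *   ... record and submit work that renders to the acquired image ...
+ *
+ *   VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
+ *   presentInfo.waitSemaphoreCount = 1;
+ *   presentInfo.pWaitSemaphores    = &renderSemaphore;
+ *   presentInfo.swapchainCount     = 1;
+ *   presentInfo.pSwapchains        = &swapchain;
+ *   presentInfo.pImageIndices      = &imageIndex;
+ *   vkQueuePresentKHR(presentQueue, &presentInfo);
+ */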
+
+#define VK_KHR_display 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR)
+
+#define VK_KHR_DISPLAY_SPEC_VERSION       21
+#define VK_KHR_DISPLAY_EXTENSION_NAME     "VK_KHR_display"
+
+
+typedef enum VkDisplayPlaneAlphaFlagBitsKHR {
+    VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008,
+    VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDisplayPlaneAlphaFlagBitsKHR;
+typedef VkFlags VkDisplayPlaneAlphaFlagsKHR;
+typedef VkFlags VkDisplayModeCreateFlagsKHR;
+typedef VkFlags VkDisplaySurfaceCreateFlagsKHR;
+
+typedef struct VkDisplayPropertiesKHR {
+    VkDisplayKHR                  display;
+    const char*                   displayName;
+    VkExtent2D                    physicalDimensions;
+    VkExtent2D                    physicalResolution;
+    VkSurfaceTransformFlagsKHR    supportedTransforms;
+    VkBool32                      planeReorderPossible;
+    VkBool32                      persistentContent;
+} VkDisplayPropertiesKHR;
+
+typedef struct VkDisplayModeParametersKHR {
+    VkExtent2D    visibleRegion;
+    uint32_t      refreshRate;
+} VkDisplayModeParametersKHR;
+
+typedef struct VkDisplayModePropertiesKHR {
+    VkDisplayModeKHR              displayMode;
+    VkDisplayModeParametersKHR    parameters;
+} VkDisplayModePropertiesKHR;
+
+typedef struct VkDisplayModeCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDisplayModeCreateFlagsKHR    flags;
+    VkDisplayModeParametersKHR     parameters;
+} VkDisplayModeCreateInfoKHR;
+
+typedef struct VkDisplayPlaneCapabilitiesKHR {
+    VkDisplayPlaneAlphaFlagsKHR    supportedAlpha;
+    VkOffset2D                     minSrcPosition;
+    VkOffset2D                     maxSrcPosition;
+    VkExtent2D                     minSrcExtent;
+    VkExtent2D                     maxSrcExtent;
+    VkOffset2D                     minDstPosition;
+    VkOffset2D                     maxDstPosition;
+    VkExtent2D                     minDstExtent;
+    VkExtent2D                     maxDstExtent;
+} VkDisplayPlaneCapabilitiesKHR;
+
+typedef struct VkDisplayPlanePropertiesKHR {
+    VkDisplayKHR    currentDisplay;
+    uint32_t        currentStackIndex;
+} VkDisplayPlanePropertiesKHR;
+
+typedef struct VkDisplaySurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkDisplaySurfaceCreateFlagsKHR    flags;
+    VkDisplayModeKHR                  displayMode;
+    uint32_t                          planeIndex;
+    uint32_t                          planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR     transform;
+    float                             globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR    alphaMode;
+    VkExtent2D                        imageExtent;
+} VkDisplaySurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
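+
+/*
+ * Editor's note: illustrative sketch only, not part of the Khronos header.
+ * VK_KHR_display enumeration above uses the same two-call idiom as the other
+ * enumeration entry points; physicalDevice is a placeholder handle and error
+ * handling is omitted.
+ *
+ *   uint32_t displayCount = 0;
+ *   vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, &displayCount, NULL);
+ *   ... allocate displayCount VkDisplayPropertiesKHR entries, then call again to fill them ...
+ */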
+
+#define VK_KHR_display_swapchain 1
+#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9
+#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain"
+
+typedef struct VkDisplayPresentInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkRect2D           srcRect;
+    VkRect2D           dstRect;
+    VkBool32           persistent;
+} VkDisplayPresentInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#define VK_KHR_xlib_surface 1
+#include <X11/Xlib.h>
+
+#define VK_KHR_XLIB_SURFACE_SPEC_VERSION  6
+#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface"
+
+typedef VkFlags VkXlibSurfaceCreateFlagsKHR;
+
+typedef struct VkXlibSurfaceCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkXlibSurfaceCreateFlagsKHR    flags;
+    Display*                       dpy;
+    Window                         window;
+} VkXlibSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID);
+#endif
+#endif /* VK_USE_PLATFORM_XLIB_KHR */
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#define VK_KHR_xcb_surface 1
+#include <xcb/xcb.h>
+
+#define VK_KHR_XCB_SURFACE_SPEC_VERSION   6
+#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface"
+
+typedef VkFlags VkXcbSurfaceCreateFlagsKHR;
+
+typedef struct VkXcbSurfaceCreateInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkXcbSurfaceCreateFlagsKHR    flags;
+    xcb_connection_t*             connection;
+    xcb_window_t                  window;
+} VkXcbSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id);
+#endif
+#endif /* VK_USE_PLATFORM_XCB_KHR */
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#define VK_KHR_wayland_surface 1
+#include <wayland-client.h>
+
+#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface"
+
+typedef VkFlags VkWaylandSurfaceCreateFlagsKHR;
+
+typedef struct VkWaylandSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkWaylandSurfaceCreateFlagsKHR    flags;
+    struct wl_display*                display;
+    struct wl_surface*                surface;
+} VkWaylandSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display);
+#endif
+#endif /* VK_USE_PLATFORM_WAYLAND_KHR */
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+#define VK_KHR_mir_surface 1
+#include <mir_toolkit/client_types.h>
+
+#define VK_KHR_MIR_SURFACE_SPEC_VERSION   4
+#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface"
+
+typedef VkFlags VkMirSurfaceCreateFlagsKHR;
+
+typedef struct VkMirSurfaceCreateInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkMirSurfaceCreateFlagsKHR    flags;
+    MirConnection*                connection;
+    MirSurface*                   mirSurface;
+} VkMirSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR(
+    VkInstance                                  instance,
+    const VkMirSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    MirConnection*                              connection);
+#endif
+#endif /* VK_USE_PLATFORM_MIR_KHR */
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#define VK_KHR_android_surface 1
+#include <android/native_window.h>
+
+#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
+#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface"
+
+typedef VkFlags VkAndroidSurfaceCreateFlagsKHR;
+
+typedef struct VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAndroidSurfaceCreateFlagsKHR    flags;
+    ANativeWindow*                    window;
+} VkAndroidSurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_KHR_win32_surface 1
+#include <windows.h>
+
+#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface"
+
+typedef VkFlags VkWin32SurfaceCreateFlagsKHR;
+
+typedef struct VkWin32SurfaceCreateInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkWin32SurfaceCreateFlagsKHR    flags;
+    HINSTANCE                       hinstance;
+    HWND                            hwnd;
+} VkWin32SurfaceCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex);
+#endif
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
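+
+/*
+ * Editor's note: illustrative sketch only, not part of the Khronos header.
+ * Creating a presentation surface from a Win32 window with the entry points
+ * above; instance, hinstance and hwnd are placeholder values supplied by the
+ * application, and error handling is omitted.
+ *
+ *   VkWin32SurfaceCreateInfoKHR info = { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
+ *   info.hinstance = hinstance;
+ *   info.hwnd      = hwnd;
+ *
+ *   VkSurfaceKHR surface = VK_NULL_HANDLE;
+ *   vkCreateWin32SurfaceKHR(instance, &info, NULL, &surface);
+ */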
+
+#define VK_KHR_sampler_mirror_clamp_to_edge 1
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge"
+
+
+#define VK_KHR_get_physical_device_properties2 1
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2"
+
+typedef struct VkPhysicalDeviceFeatures2KHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPhysicalDeviceFeatures    features;
+} VkPhysicalDeviceFeatures2KHR;
+
+typedef struct VkPhysicalDeviceProperties2KHR {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkPhysicalDeviceProperties    properties;
+} VkPhysicalDeviceProperties2KHR;
+
+typedef struct VkFormatProperties2KHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkFormatProperties    formatProperties;
+} VkFormatProperties2KHR;
+
+typedef struct VkImageFormatProperties2KHR {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkImageFormatProperties    imageFormatProperties;
+} VkImageFormatProperties2KHR;
+
+typedef struct VkPhysicalDeviceImageFormatInfo2KHR {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFormat              format;
+    VkImageType           type;
+    VkImageTiling         tiling;
+    VkImageUsageFlags     usage;
+    VkImageCreateFlags    flags;
+} VkPhysicalDeviceImageFormatInfo2KHR;
+
+typedef struct VkQueueFamilyProperties2KHR {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkQueueFamilyProperties    queueFamilyProperties;
+} VkQueueFamilyProperties2KHR;
+
+typedef struct VkPhysicalDeviceMemoryProperties2KHR {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkPhysicalDeviceMemoryProperties    memoryProperties;
+} VkPhysicalDeviceMemoryProperties2KHR;
+
+typedef struct VkSparseImageFormatProperties2KHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSparseImageFormatProperties    properties;
+} VkSparseImageFormatProperties2KHR;
+
+typedef struct VkPhysicalDeviceSparseImageFormatInfo2KHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkFormat                 format;
+    VkImageType              type;
+    VkSampleCountFlagBits    samples;
+    VkImageUsageFlags        usage;
+    VkImageTiling            tiling;
+} VkPhysicalDeviceSparseImageFormatInfo2KHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2KHR*               pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2KHR*             pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2KHR*                     pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2KHR*  pImageFormatInfo,
+    VkImageFormatProperties2KHR*                pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2KHR*                pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2KHR*       pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2KHR*          pProperties);
+#endif
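+
+/*
+ * Editor's note: illustrative sketch only, not part of the Khronos header.
+ * The *2KHR queries above are extensible via pNext chaining; for example,
+ * VkPhysicalDeviceIDPropertiesKHR (declared further below) can be chained
+ * into vkGetPhysicalDeviceProperties2KHR. physicalDevice is a placeholder
+ * handle.
+ *
+ *   VkPhysicalDeviceIDPropertiesKHR idProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR };
+ *   VkPhysicalDeviceProperties2KHR  props   = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR };
+ *   props.pNext = &idProps;
+ *   vkGetPhysicalDeviceProperties2KHR(physicalDevice, &props);
+ */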
+
+#define VK_KHR_shader_draw_parameters 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters"
+
+
+#define VK_KHR_maintenance1 1
+#define VK_KHR_MAINTENANCE1_SPEC_VERSION  1
+#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1"
+
+typedef VkFlags VkCommandPoolTrimFlagsKHR;
+
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlagsKHR                   flags);
+#endif
+
+#define VK_KHR_external_memory_capabilities 1
+#define VK_LUID_SIZE_KHR                  8
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities"
+
+
+typedef enum VkExternalMemoryHandleTypeFlagBitsKHR {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = 0x00000010,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = 0x00000020,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = 0x00000040,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBitsKHR;
+typedef VkFlags VkExternalMemoryHandleTypeFlagsKHR;
+
+typedef enum VkExternalMemoryFeatureFlagBitsKHR {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBitsKHR;
+typedef VkFlags VkExternalMemoryFeatureFlagsKHR;
+
+typedef struct VkExternalMemoryPropertiesKHR {
+    VkExternalMemoryFeatureFlagsKHR       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlagsKHR    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlagsKHR    compatibleHandleTypes;
+} VkExternalMemoryPropertiesKHR;
+
+typedef struct VkPhysicalDeviceExternalImageFormatInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalMemoryHandleTypeFlagBitsKHR    handleType;
+} VkPhysicalDeviceExternalImageFormatInfoKHR;
+
+typedef struct VkExternalImageFormatPropertiesKHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkExternalMemoryPropertiesKHR    externalMemoryProperties;
+} VkExternalImageFormatPropertiesKHR;
+
+typedef struct VkPhysicalDeviceExternalBufferInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkBufferCreateFlags                      flags;
+    VkBufferUsageFlags                       usage;
+    VkExternalMemoryHandleTypeFlagBitsKHR    handleType;
+} VkPhysicalDeviceExternalBufferInfoKHR;
+
+typedef struct VkExternalBufferPropertiesKHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkExternalMemoryPropertiesKHR    externalMemoryProperties;
+} VkExternalBufferPropertiesKHR;
+
+typedef struct VkPhysicalDeviceIDPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            deviceUUID[VK_UUID_SIZE];
+    uint8_t            driverUUID[VK_UUID_SIZE];
+    uint8_t            deviceLUID[VK_LUID_SIZE_KHR];
+    uint32_t           deviceNodeMask;
+    VkBool32           deviceLUIDValid;
+} VkPhysicalDeviceIDPropertiesKHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo,
+    VkExternalBufferPropertiesKHR*              pExternalBufferProperties);
+#endif
+
+#define VK_KHR_external_memory 1
+#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory"
+#define VK_QUEUE_FAMILY_EXTERNAL_KHR      (~0U-1)
+
+typedef struct VkExternalMemoryImageCreateInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagsKHR    handleTypes;
+} VkExternalMemoryImageCreateInfoKHR;
+
+typedef struct VkExternalMemoryBufferCreateInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagsKHR    handleTypes;
+} VkExternalMemoryBufferCreateInfoKHR;
+
+typedef struct VkExportMemoryAllocateInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagsKHR    handleTypes;
+} VkExportMemoryAllocateInfoKHR;
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_KHR_external_memory_win32 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32"
+
+typedef struct VkImportMemoryWin32HandleInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalMemoryHandleTypeFlagBitsKHR    handleType;
+    HANDLE                                   handle;
+    LPCWSTR                                  name;
+} VkImportMemoryWin32HandleInfoKHR;
+
+typedef struct VkExportMemoryWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportMemoryWin32HandleInfoKHR;
+
+typedef struct VkMemoryWin32HandlePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryWin32HandlePropertiesKHR;
+
+typedef struct VkMemoryGetWin32HandleInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkDeviceMemory                           memory;
+    VkExternalMemoryHandleTypeFlagBitsKHR    handleType;
+} VkMemoryGetWin32HandleInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBitsKHR       handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+#endif
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#define VK_KHR_external_memory_fd 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd"
+
+typedef struct VkImportMemoryFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalMemoryHandleTypeFlagBitsKHR    handleType;
+    int                                      fd;
+} VkImportMemoryFdInfoKHR;
+
+typedef struct VkMemoryFdPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryFdPropertiesKHR;
+
+typedef struct VkMemoryGetFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkDeviceMemory                           memory;
+    VkExternalMemoryHandleTypeFlagBitsKHR    handleType;
+} VkMemoryGetFdInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBitsKHR       handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+#endif
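+
+/*
+ * Illustrative usage sketch (editorial addition): exporting an opaque file
+ * descriptor for a VkDeviceMemory allocation. It assumes the memory was
+ * allocated with VkExportMemoryAllocateInfoKHR chained into the allocation;
+ * the helper name is hypothetical, real code would typically load
+ * vkGetMemoryFdKHR through vkGetDeviceProcAddr, and the block is guarded
+ * with "#if 0" so it does not affect compilation.
+ */
+#if 0
+static int exampleExportMemoryFd(VkDevice device, VkDeviceMemory memory) {
+    VkMemoryGetFdInfoKHR getFdInfo;
+    int fd = -1;
+
+    getFdInfo.sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
+    getFdInfo.pNext      = NULL;
+    getFdInfo.memory     = memory;
+    getFdInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+
+    if (vkGetMemoryFdKHR(device, &getFdInfo, &fd) != VK_SUCCESS)
+        return -1;
+    return fd; /* ownership of the descriptor passes to the caller */
+}
+#endif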
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_KHR_win32_keyed_mutex 1
+#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1
+#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex"
+
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 acquireCount;
+    const VkDeviceMemory*    pAcquireSyncs;
+    const uint64_t*          pAcquireKeys;
+    const uint32_t*          pAcquireTimeouts;
+    uint32_t                 releaseCount;
+    const VkDeviceMemory*    pReleaseSyncs;
+    const uint64_t*          pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoKHR;
+
+
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#define VK_KHR_external_semaphore_capabilities 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities"
+
+
+typedef enum VkExternalSemaphoreHandleTypeFlagBitsKHR {
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = 0x00000008,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000010,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkExternalSemaphoreHandleTypeFlagBitsKHR;
+typedef VkFlags VkExternalSemaphoreHandleTypeFlagsKHR;
+
+typedef enum VkExternalSemaphoreFeatureFlagBitsKHR {
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkExternalSemaphoreFeatureFlagBitsKHR;
+typedef VkFlags VkExternalSemaphoreFeatureFlagsKHR;
+
+typedef struct VkPhysicalDeviceExternalSemaphoreInfoKHR {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkExternalSemaphoreHandleTypeFlagBitsKHR    handleType;
+} VkPhysicalDeviceExternalSemaphoreInfoKHR;
+
+typedef struct VkExternalSemaphorePropertiesKHR {
+    VkStructureType                          sType;
+    void*                                    pNext;
+    VkExternalSemaphoreHandleTypeFlagsKHR    exportFromImportedHandleTypes;
+    VkExternalSemaphoreHandleTypeFlagsKHR    compatibleHandleTypes;
+    VkExternalSemaphoreFeatureFlagsKHR       externalSemaphoreFeatures;
+} VkExternalSemaphorePropertiesKHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo,
+    VkExternalSemaphorePropertiesKHR*           pExternalSemaphoreProperties);
+#endif
+
+#define VK_KHR_external_semaphore 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore"
+
+
+typedef enum VkSemaphoreImportFlagBitsKHR {
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001,
+    VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSemaphoreImportFlagBitsKHR;
+typedef VkFlags VkSemaphoreImportFlagsKHR;
+
+typedef struct VkExportSemaphoreCreateInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalSemaphoreHandleTypeFlagsKHR    handleTypes;
+} VkExportSemaphoreCreateInfoKHR;
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_KHR_external_semaphore_win32 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32"
+
+typedef struct VkImportSemaphoreWin32HandleInfoKHR {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkSemaphore                                 semaphore;
+    VkSemaphoreImportFlagsKHR                   flags;
+    VkExternalSemaphoreHandleTypeFlagBitsKHR    handleType;
+    HANDLE                                      handle;
+    LPCWSTR                                     name;
+} VkImportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkExportSemaphoreWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkD3D12FenceSubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreValuesCount;
+    const uint64_t*    pWaitSemaphoreValues;
+    uint32_t           signalSemaphoreValuesCount;
+    const uint64_t*    pSignalSemaphoreValues;
+} VkD3D12FenceSubmitInfoKHR;
+
+typedef struct VkSemaphoreGetWin32HandleInfoKHR {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkSemaphore                                 semaphore;
+    VkExternalSemaphoreHandleTypeFlagBitsKHR    handleType;
+} VkSemaphoreGetWin32HandleInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#define VK_KHR_external_semaphore_fd 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd"
+
+typedef struct VkImportSemaphoreFdInfoKHR {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkSemaphore                                 semaphore;
+    VkSemaphoreImportFlagsKHR                   flags;
+    VkExternalSemaphoreHandleTypeFlagBitsKHR    handleType;
+    int                                         fd;
+} VkImportSemaphoreFdInfoKHR;
+
+typedef struct VkSemaphoreGetFdInfoKHR {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkSemaphore                                 semaphore;
+    VkExternalSemaphoreHandleTypeFlagBitsKHR    handleType;
+} VkSemaphoreGetFdInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+#endif
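+
+/*
+ * Illustrative usage sketch (editorial addition): importing a sync file
+ * descriptor into an existing VkSemaphore. The temporary-import flag matches
+ * the usual sync-fd pattern; the helper name is hypothetical and the block
+ * is guarded with "#if 0" so it does not affect compilation.
+ */
+#if 0
+static VkResult exampleImportSemaphoreSyncFd(
+        VkDevice device, VkSemaphore semaphore, int fd) {
+    VkImportSemaphoreFdInfoKHR importInfo;
+
+    importInfo.sType      = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
+    importInfo.pNext      = NULL;
+    importInfo.semaphore  = semaphore;
+    importInfo.flags      = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
+    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
+    importInfo.fd         = fd; /* ownership transfers on success */
+
+    return vkImportSemaphoreFdKHR(device, &importInfo);
+}
+#endif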
+
+#define VK_KHR_push_descriptor 1
+#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 1
+#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor"
+
+typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPushDescriptors;
+} VkPhysicalDevicePushDescriptorPropertiesKHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+#endif
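+
+/*
+ * Illustrative usage sketch (editorial addition): pushing a single uniform
+ * buffer binding with vkCmdPushDescriptorSetKHR instead of allocating and
+ * updating a descriptor set. It assumes set 0 of the pipeline layout uses a
+ * layout created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+ * names and values are placeholders, and the block is guarded with "#if 0".
+ */
+#if 0
+static void examplePushUniformBuffer(
+        VkCommandBuffer  cmd,
+        VkPipelineLayout layout,
+        VkBuffer         buffer,
+        VkDeviceSize     size) {
+    VkDescriptorBufferInfo bufferInfo;
+    VkWriteDescriptorSet   write;
+
+    bufferInfo.buffer = buffer;
+    bufferInfo.offset = 0;
+    bufferInfo.range  = size;
+
+    write.sType            = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    write.pNext            = NULL;
+    write.dstSet           = VK_NULL_HANDLE; /* ignored for push descriptors */
+    write.dstBinding       = 0;
+    write.dstArrayElement  = 0;
+    write.descriptorCount  = 1;
+    write.descriptorType   = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    write.pImageInfo       = NULL;
+    write.pBufferInfo      = &bufferInfo;
+    write.pTexelBufferView = NULL;
+
+    vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
+        layout, 0 /* set */, 1, &write);
+}
+#endif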
+
+#define VK_KHR_16bit_storage 1
+#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1
+#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage"
+
+typedef struct VkPhysicalDevice16BitStorageFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer16BitAccess;
+    VkBool32           uniformAndStorageBuffer16BitAccess;
+    VkBool32           storagePushConstant16;
+    VkBool32           storageInputOutput16;
+} VkPhysicalDevice16BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_incremental_present 1
+#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1
+#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present"
+
+typedef struct VkRectLayerKHR {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+    uint32_t      layer;
+} VkRectLayerKHR;
+
+typedef struct VkPresentRegionKHR {
+    uint32_t                 rectangleCount;
+    const VkRectLayerKHR*    pRectangles;
+} VkPresentRegionKHR;
+
+typedef struct VkPresentRegionsKHR {
+    VkStructureType              sType;
+    const void*                  pNext;
+    uint32_t                     swapchainCount;
+    const VkPresentRegionKHR*    pRegions;
+} VkPresentRegionsKHR;
+
+
+
+#define VK_KHR_descriptor_update_template 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplateKHR)
+
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template"
+
+
+typedef enum VkDescriptorUpdateTemplateTypeKHR {
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = 0,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE_KHR = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR + 1),
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDescriptorUpdateTemplateTypeKHR;
+
+typedef VkFlags VkDescriptorUpdateTemplateCreateFlagsKHR;
+
+typedef struct VkDescriptorUpdateTemplateEntryKHR {
+    uint32_t            dstBinding;
+    uint32_t            dstArrayElement;
+    uint32_t            descriptorCount;
+    VkDescriptorType    descriptorType;
+    size_t              offset;
+    size_t              stride;
+} VkDescriptorUpdateTemplateEntryKHR;
+
+typedef struct VkDescriptorUpdateTemplateCreateInfoKHR {
+    VkStructureType                              sType;
+    void*                                        pNext;
+    VkDescriptorUpdateTemplateCreateFlagsKHR     flags;
+    uint32_t                                     descriptorUpdateEntryCount;
+    const VkDescriptorUpdateTemplateEntryKHR*    pDescriptorUpdateEntries;
+    VkDescriptorUpdateTemplateTypeKHR            templateType;
+    VkDescriptorSetLayout                        descriptorSetLayout;
+    VkPipelineBindPoint                          pipelineBindPoint;
+    VkPipelineLayout                             pipelineLayout;
+    uint32_t                                     set;
+} VkDescriptorUpdateTemplateCreateInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplateKHR*              pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+#endif
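+
+/*
+ * Illustrative usage sketch (editorial addition): building a one-entry
+ * descriptor update template and using it to write a uniform buffer
+ * descriptor from a plain host struct. All names are hypothetical, real code
+ * would load the entry points through vkGetDeviceProcAddr and cache the
+ * template instead of recreating it, and the block is guarded with "#if 0".
+ */
+#if 0
+typedef struct ExampleDescriptorData {
+    VkDescriptorBufferInfo uniformBuffer; /* binding 0 */
+} ExampleDescriptorData;
+
+static VkResult exampleUpdateWithTemplate(
+        VkDevice device, VkDescriptorSetLayout setLayout,
+        VkDescriptorSet set, const ExampleDescriptorData* pData) {
+    VkDescriptorUpdateTemplateEntryKHR      entry;
+    VkDescriptorUpdateTemplateCreateInfoKHR createInfo;
+    VkDescriptorUpdateTemplateKHR           tmpl;
+    VkResult                                result;
+
+    entry.dstBinding      = 0;
+    entry.dstArrayElement = 0;
+    entry.descriptorCount = 1;
+    entry.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    entry.offset          = 0; /* offset of uniformBuffer in ExampleDescriptorData */
+    entry.stride          = sizeof(VkDescriptorBufferInfo);
+
+    createInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.descriptorUpdateEntryCount = 1;
+    createInfo.pDescriptorUpdateEntries   = &entry;
+    createInfo.templateType        = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR;
+    createInfo.descriptorSetLayout = setLayout;
+    createInfo.pipelineBindPoint   = VK_PIPELINE_BIND_POINT_GRAPHICS; /* ignored for this templateType */
+    createInfo.pipelineLayout      = VK_NULL_HANDLE;                  /* ignored for this templateType */
+    createInfo.set                 = 0;                               /* ignored for this templateType */
+
+    result = vkCreateDescriptorUpdateTemplateKHR(device, &createInfo, NULL, &tmpl);
+    if (result != VK_SUCCESS)
+        return result;
+
+    vkUpdateDescriptorSetWithTemplateKHR(device, set, tmpl, pData);
+    vkDestroyDescriptorUpdateTemplateKHR(device, tmpl, NULL);
+    return VK_SUCCESS;
+}
+#endif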
+
+#define VK_KHR_shared_presentable_image 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image"
+
+typedef struct VkSharedPresentSurfaceCapabilitiesKHR {
+    VkStructureType      sType;
+    void*                pNext;
+    VkImageUsageFlags    sharedPresentSupportedUsageFlags;
+} VkSharedPresentSurfaceCapabilitiesKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+#endif
+
+#define VK_KHR_external_fence_capabilities 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities"
+
+
+typedef enum VkExternalFenceHandleTypeFlagBitsKHR {
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000008,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkExternalFenceHandleTypeFlagBitsKHR;
+typedef VkFlags VkExternalFenceHandleTypeFlagsKHR;
+
+typedef enum VkExternalFenceFeatureFlagBitsKHR {
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002,
+    VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkExternalFenceFeatureFlagBitsKHR;
+typedef VkFlags VkExternalFenceFeatureFlagsKHR;
+
+typedef struct VkPhysicalDeviceExternalFenceInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkExternalFenceHandleTypeFlagBitsKHR    handleType;
+} VkPhysicalDeviceExternalFenceInfoKHR;
+
+typedef struct VkExternalFencePropertiesKHR {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkExternalFenceHandleTypeFlagsKHR    exportFromImportedHandleTypes;
+    VkExternalFenceHandleTypeFlagsKHR    compatibleHandleTypes;
+    VkExternalFenceFeatureFlagsKHR       externalFenceFeatures;
+} VkExternalFencePropertiesKHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo,
+    VkExternalFencePropertiesKHR*               pExternalFenceProperties);
+#endif
+
+#define VK_KHR_external_fence 1
+#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence"
+
+
+typedef enum VkFenceImportFlagBitsKHR {
+    VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001,
+    VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkFenceImportFlagBitsKHR;
+typedef VkFlags VkFenceImportFlagsKHR;
+
+typedef struct VkExportFenceCreateInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalFenceHandleTypeFlagsKHR    handleTypes;
+} VkExportFenceCreateInfoKHR;
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_KHR_external_fence_win32 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32"
+
+typedef struct VkImportFenceWin32HandleInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkFence                                 fence;
+    VkFenceImportFlagsKHR                   flags;
+    VkExternalFenceHandleTypeFlagBitsKHR    handleType;
+    HANDLE                                  handle;
+    LPCWSTR                                 name;
+} VkImportFenceWin32HandleInfoKHR;
+
+typedef struct VkExportFenceWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportFenceWin32HandleInfoKHR;
+
+typedef struct VkFenceGetWin32HandleInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkFence                                 fence;
+    VkExternalFenceHandleTypeFlagBitsKHR    handleType;
+} VkFenceGetWin32HandleInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#define VK_KHR_external_fence_fd 1
+#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd"
+
+typedef struct VkImportFenceFdInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkFence                                 fence;
+    VkFenceImportFlagsKHR                   flags;
+    VkExternalFenceHandleTypeFlagBitsKHR    handleType;
+    int                                     fd;
+} VkImportFenceFdInfoKHR;
+
+typedef struct VkFenceGetFdInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkFence                                 fence;
+    VkExternalFenceHandleTypeFlagBitsKHR    handleType;
+} VkFenceGetFdInfoKHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+#endif
+
+#define VK_KHR_get_surface_capabilities2 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2"
+
+typedef struct VkPhysicalDeviceSurfaceInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSurfaceKHR       surface;
+} VkPhysicalDeviceSurfaceInfo2KHR;
+
+typedef struct VkSurfaceCapabilities2KHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkSurfaceCapabilitiesKHR    surfaceCapabilities;
+} VkSurfaceCapabilities2KHR;
+
+typedef struct VkSurfaceFormat2KHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSurfaceFormatKHR    surfaceFormat;
+} VkSurfaceFormat2KHR;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+#endif
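+
+/*
+ * Illustrative usage sketch (editorial addition): querying surface
+ * capabilities through the extensible *2KHR entry point. The helper name is
+ * hypothetical and the block is guarded with "#if 0" so it does not affect
+ * compilation.
+ */
+#if 0
+static VkResult exampleQuerySurfaceCaps2(
+        VkPhysicalDevice           physicalDevice,
+        VkSurfaceKHR               surface,
+        VkSurfaceCapabilities2KHR* pCaps) {
+    VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo;
+
+    surfaceInfo.sType   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
+    surfaceInfo.pNext   = NULL;
+    surfaceInfo.surface = surface;
+
+    pCaps->sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
+    pCaps->pNext = NULL; /* extension output structs could be chained here */
+
+    return vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+        physicalDevice, &surfaceInfo, pCaps);
+}
+#endif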
+
+#define VK_KHR_variable_pointers 1
+#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1
+#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers"
+
+typedef struct VkPhysicalDeviceVariablePointerFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           variablePointersStorageBuffer;
+    VkBool32           variablePointers;
+} VkPhysicalDeviceVariablePointerFeaturesKHR;
+
+
+
+#define VK_KHR_dedicated_allocation 1
+#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 1
+#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation"
+
+typedef struct VkMemoryDedicatedRequirementsKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           prefersDedicatedAllocation;
+    VkBool32           requiresDedicatedAllocation;
+} VkMemoryDedicatedRequirementsKHR;
+
+typedef struct VkMemoryDedicatedAllocateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkMemoryDedicatedAllocateInfoKHR;
+
+
+
+#define VK_KHR_storage_buffer_storage_class 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class"
+
+
+#define VK_KHR_get_memory_requirements2 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2"
+
+typedef struct VkBufferMemoryRequirementsInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferMemoryRequirementsInfo2KHR;
+
+typedef struct VkImageMemoryRequirementsInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageMemoryRequirementsInfo2KHR;
+
+typedef struct VkImageSparseMemoryRequirementsInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageSparseMemoryRequirementsInfo2KHR;
+
+typedef struct VkMemoryRequirements2KHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkMemoryRequirements    memoryRequirements;
+} VkMemoryRequirements2KHR;
+
+typedef struct VkSparseImageMemoryRequirements2KHR {
+    VkStructureType                    sType;
+    void*                              pNext;
+    VkSparseImageMemoryRequirements    memoryRequirements;
+} VkSparseImageMemoryRequirements2KHR;
+
+
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2KHR*    pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2KHR*   pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2KHR*        pSparseMemoryRequirements);
+#endif
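+
+/*
+ * Illustrative usage sketch (editorial addition): querying image memory
+ * requirements through vkGetImageMemoryRequirements2KHR with a
+ * VkMemoryDedicatedRequirementsKHR struct chained in, which is how
+ * VK_KHR_dedicated_allocation reports its preference. The helper name is
+ * hypothetical and the block is guarded with "#if 0".
+ */
+#if 0
+static VkBool32 exampleImagePrefersDedicatedAlloc(
+        VkDevice              device,
+        VkImage               image,
+        VkMemoryRequirements* pCoreReqs) {
+    VkMemoryDedicatedRequirementsKHR  dedicatedReqs;
+    VkMemoryRequirements2KHR          memReqs;
+    VkImageMemoryRequirementsInfo2KHR info;
+
+    dedicatedReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR;
+    dedicatedReqs.pNext = NULL;
+
+    memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR;
+    memReqs.pNext = &dedicatedReqs; /* chain the dedicated-allocation query */
+
+    info.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR;
+    info.pNext = NULL;
+    info.image = image;
+
+    vkGetImageMemoryRequirements2KHR(device, &info, &memReqs);
+
+    *pCoreReqs = memReqs.memoryRequirements;
+    return dedicatedReqs.prefersDedicatedAllocation;
+}
+#endif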
+
+#define VK_EXT_debug_report 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT)
+
+#define VK_EXT_DEBUG_REPORT_SPEC_VERSION  8
+#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report"
+#define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT
+#define VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT
+
+
+typedef enum VkDebugReportObjectTypeEXT {
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10,
+    VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30,
+    VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = 1000085000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1),
+    VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportObjectTypeEXT;
+
+
+typedef enum VkDebugReportFlagBitsEXT {
+    VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001,
+    VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002,
+    VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004,
+    VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008,
+    VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010,
+    VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportFlagBitsEXT;
+typedef VkFlags VkDebugReportFlagsEXT;
+
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)(
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage,
+    void*                                       pUserData);
+
+
+typedef struct VkDebugReportCallbackCreateInfoEXT {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDebugReportFlagsEXT           flags;
+    PFN_vkDebugReportCallbackEXT    pfnCallback;
+    void*                           pUserData;
+} VkDebugReportCallbackCreateInfoEXT;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+#endif
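+
+/*
+ * Illustrative usage sketch (editorial addition): registering a debug report
+ * callback for errors and warnings. The callback and helper names are
+ * hypothetical, real code would load vkCreateDebugReportCallbackEXT through
+ * vkGetInstanceProcAddr, and the block is guarded with "#if 0".
+ */
+#if 0
+static VKAPI_ATTR VkBool32 VKAPI_CALL exampleDebugCallback(
+        VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
+        uint64_t object, size_t location, int32_t messageCode,
+        const char* pLayerPrefix, const char* pMessage, void* pUserData) {
+    /* Returning VK_FALSE tells the layer not to abort the triggering call */
+    return VK_FALSE;
+}
+
+static VkResult exampleRegisterDebugCallback(
+        VkInstance instance, VkDebugReportCallbackEXT* pCallback) {
+    VkDebugReportCallbackCreateInfoEXT info;
+
+    info.sType       = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
+    info.pNext       = NULL;
+    info.flags       = VK_DEBUG_REPORT_ERROR_BIT_EXT
+                     | VK_DEBUG_REPORT_WARNING_BIT_EXT;
+    info.pfnCallback = exampleDebugCallback;
+    info.pUserData   = NULL;
+
+    return vkCreateDebugReportCallbackEXT(instance, &info, NULL, pCallback);
+}
+#endif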
+
+#define VK_NV_glsl_shader 1
+#define VK_NV_GLSL_SHADER_SPEC_VERSION    1
+#define VK_NV_GLSL_SHADER_EXTENSION_NAME  "VK_NV_glsl_shader"
+
+
+#define VK_IMG_filter_cubic 1
+#define VK_IMG_FILTER_CUBIC_SPEC_VERSION  1
+#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic"
+
+
+#define VK_AMD_rasterization_order 1
+#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1
+#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order"
+
+
+typedef enum VkRasterizationOrderAMD {
+    VK_RASTERIZATION_ORDER_STRICT_AMD = 0,
+    VK_RASTERIZATION_ORDER_RELAXED_AMD = 1,
+    VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD,
+    VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1),
+    VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkRasterizationOrderAMD;
+
+typedef struct VkPipelineRasterizationStateRasterizationOrderAMD {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkRasterizationOrderAMD    rasterizationOrder;
+} VkPipelineRasterizationStateRasterizationOrderAMD;
+
+
+
+#define VK_AMD_shader_trinary_minmax 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax"
+
+
+#define VK_AMD_shader_explicit_vertex_parameter 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter"
+
+
+#define VK_EXT_debug_marker 1
+#define VK_EXT_DEBUG_MARKER_SPEC_VERSION  4
+#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker"
+
+typedef struct VkDebugMarkerObjectNameInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    const char*                   pObjectName;
+} VkDebugMarkerObjectNameInfoEXT;
+
+typedef struct VkDebugMarkerObjectTagInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    uint64_t                      tagName;
+    size_t                        tagSize;
+    const void*                   pTag;
+} VkDebugMarkerObjectTagInfoEXT;
+
+typedef struct VkDebugMarkerMarkerInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pMarkerName;
+    float              color[4];
+} VkDebugMarkerMarkerInfoEXT;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+#endif
+
+#define VK_AMD_gcn_shader 1
+#define VK_AMD_GCN_SHADER_SPEC_VERSION    1
+#define VK_AMD_GCN_SHADER_EXTENSION_NAME  "VK_AMD_gcn_shader"
+
+
+#define VK_NV_dedicated_allocation 1
+#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation"
+
+typedef struct VkDedicatedAllocationImageCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationImageCreateInfoNV;
+
+typedef struct VkDedicatedAllocationBufferCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationBufferCreateInfoNV;
+
+typedef struct VkDedicatedAllocationMemoryAllocateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkDedicatedAllocationMemoryAllocateInfoNV;
+
+
+
+#define VK_AMD_draw_indirect_count 1
+#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1
+#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count"
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
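+
+/*
+ * Illustrative usage sketch (editorial addition): an indexed indirect draw
+ * whose draw count is read from a separate count buffer on the GPU. The
+ * helper name, offsets and the 1024 limit are placeholders, a pipeline and
+ * index buffer are assumed to be bound, and the block is guarded with "#if 0".
+ */
+#if 0
+static void exampleDrawIndexedIndirectCount(
+        VkCommandBuffer cmd, VkBuffer argBuffer, VkBuffer countBuffer) {
+    vkCmdDrawIndexedIndirectCountAMD(cmd,
+        argBuffer,   0,                         /* indirect arguments at offset 0  */
+        countBuffer, 0,                         /* uint32_t draw count at offset 0 */
+        1024,                                   /* upper bound on the draw count   */
+        sizeof(VkDrawIndexedIndirectCommand));  /* stride between argument structs */
+}
+#endif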
+
+#define VK_AMD_negative_viewport_height 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height"
+
+
+#define VK_AMD_gpu_shader_half_float 1
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float"
+
+
+#define VK_AMD_shader_ballot 1
+#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1
+#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot"
+
+
+#define VK_AMD_texture_gather_bias_lod 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod"
+
+typedef struct VkTextureLODGatherFormatPropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supportsTextureGatherLODBiasAMD;
+} VkTextureLODGatherFormatPropertiesAMD;
+
+
+
+#define VK_KHX_multiview 1
+#define VK_KHX_MULTIVIEW_SPEC_VERSION     1
+#define VK_KHX_MULTIVIEW_EXTENSION_NAME   "VK_KHX_multiview"
+
+typedef struct VkRenderPassMultiviewCreateInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           subpassCount;
+    const uint32_t*    pViewMasks;
+    uint32_t           dependencyCount;
+    const int32_t*     pViewOffsets;
+    uint32_t           correlationMaskCount;
+    const uint32_t*    pCorrelationMasks;
+} VkRenderPassMultiviewCreateInfoKHX;
+
+typedef struct VkPhysicalDeviceMultiviewFeaturesKHX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiview;
+    VkBool32           multiviewGeometryShader;
+    VkBool32           multiviewTessellationShader;
+} VkPhysicalDeviceMultiviewFeaturesKHX;
+
+typedef struct VkPhysicalDeviceMultiviewPropertiesKHX {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxMultiviewViewCount;
+    uint32_t           maxMultiviewInstanceIndex;
+} VkPhysicalDeviceMultiviewPropertiesKHX;
+
+
+
+#define VK_IMG_format_pvrtc 1
+#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION  1
+#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc"
+
+
+#define VK_NV_external_memory_capabilities 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities"
+
+
+typedef enum VkExternalMemoryHandleTypeFlagBitsNV {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBitsNV;
+typedef VkFlags VkExternalMemoryHandleTypeFlagsNV;
+
+typedef enum VkExternalMemoryFeatureFlagBitsNV {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBitsNV;
+typedef VkFlags VkExternalMemoryFeatureFlagsNV;
+
+typedef struct VkExternalImageFormatPropertiesNV {
+    VkImageFormatProperties              imageFormatProperties;
+    VkExternalMemoryFeatureFlagsNV       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlagsNV    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlagsNV    compatibleHandleTypes;
+} VkExternalImageFormatPropertiesNV;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+#endif
+
+#define VK_NV_external_memory 1
+#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory"
+
+typedef struct VkExternalMemoryImageCreateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExternalMemoryImageCreateInfoNV;
+
+typedef struct VkExportMemoryAllocateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExportMemoryAllocateInfoNV;
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_NV_external_memory_win32 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32"
+
+typedef struct VkImportMemoryWin32HandleInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleType;
+    HANDLE                               handle;
+} VkImportMemoryWin32HandleInfoNV;
+
+typedef struct VkExportMemoryWin32HandleInfoNV {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+} VkExportMemoryWin32HandleInfoNV;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+#endif
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#define VK_NV_win32_keyed_mutex 1
+#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 1
+#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex"
+
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 acquireCount;
+    const VkDeviceMemory*    pAcquireSyncs;
+    const uint64_t*          pAcquireKeys;
+    const uint32_t*          pAcquireTimeoutMilliseconds;
+    uint32_t                 releaseCount;
+    const VkDeviceMemory*    pReleaseSyncs;
+    const uint64_t*          pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoNV;
+
+
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#define VK_KHX_device_group 1
+#define VK_MAX_DEVICE_GROUP_SIZE_KHX      32
+#define VK_KHX_DEVICE_GROUP_SPEC_VERSION  1
+#define VK_KHX_DEVICE_GROUP_EXTENSION_NAME "VK_KHX_device_group"
+
+
+typedef enum VkPeerMemoryFeatureFlagBitsKHX {
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHX = 0x00000001,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHX = 0x00000002,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHX = 0x00000004,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHX = 0x00000008,
+    VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF
+} VkPeerMemoryFeatureFlagBitsKHX;
+typedef VkFlags VkPeerMemoryFeatureFlagsKHX;
+
+typedef enum VkMemoryAllocateFlagBitsKHX {
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHX = 0x00000001,
+    VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF
+} VkMemoryAllocateFlagBitsKHX;
+typedef VkFlags VkMemoryAllocateFlagsKHX;
+
+typedef enum VkDeviceGroupPresentModeFlagBitsKHX {
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHX = 0x00000001,
+    VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHX = 0x00000002,
+    VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHX = 0x00000004,
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHX = 0x00000008,
+    VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF
+} VkDeviceGroupPresentModeFlagBitsKHX;
+typedef VkFlags VkDeviceGroupPresentModeFlagsKHX;
+
+typedef struct VkMemoryAllocateFlagsInfoKHX {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkMemoryAllocateFlagsKHX    flags;
+    uint32_t                    deviceMask;
+} VkMemoryAllocateFlagsInfoKHX;
+
+typedef struct VkBindBufferMemoryInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+} VkBindBufferMemoryInfoKHX;
+
+typedef struct VkBindImageMemoryInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+    uint32_t           SFRRectCount;
+    const VkRect2D*    pSFRRects;
+} VkBindImageMemoryInfoKHX;
+
+typedef struct VkDeviceGroupRenderPassBeginInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+    uint32_t           deviceRenderAreaCount;
+    const VkRect2D*    pDeviceRenderAreas;
+} VkDeviceGroupRenderPassBeginInfoKHX;
+
+typedef struct VkDeviceGroupCommandBufferBeginInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+} VkDeviceGroupCommandBufferBeginInfoKHX;
+
+typedef struct VkDeviceGroupSubmitInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreCount;
+    const uint32_t*    pWaitSemaphoreDeviceIndices;
+    uint32_t           commandBufferCount;
+    const uint32_t*    pCommandBufferDeviceMasks;
+    uint32_t           signalSemaphoreCount;
+    const uint32_t*    pSignalSemaphoreDeviceIndices;
+} VkDeviceGroupSubmitInfoKHX;
+
+typedef struct VkDeviceGroupBindSparseInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           resourceDeviceIndex;
+    uint32_t           memoryDeviceIndex;
+} VkDeviceGroupBindSparseInfoKHX;
+
+typedef struct VkDeviceGroupPresentCapabilitiesKHX {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    uint32_t                            presentMask[VK_MAX_DEVICE_GROUP_SIZE_KHX];
+    VkDeviceGroupPresentModeFlagsKHX    modes;
+} VkDeviceGroupPresentCapabilitiesKHX;
+
+typedef struct VkImageSwapchainCreateInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+} VkImageSwapchainCreateInfoKHX;
+
+typedef struct VkBindImageMemorySwapchainInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint32_t           imageIndex;
+} VkBindImageMemorySwapchainInfoKHX;
+
+typedef struct VkAcquireNextImageInfoKHX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint64_t           timeout;
+    VkSemaphore        semaphore;
+    VkFence            fence;
+    uint32_t           deviceMask;
+} VkAcquireNextImageInfoKHX;
+
+typedef struct VkDeviceGroupPresentInfoKHX {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    uint32_t                               swapchainCount;
+    const uint32_t*                        pDeviceMasks;
+    VkDeviceGroupPresentModeFlagBitsKHX    mode;
+} VkDeviceGroupPresentInfoKHX;
+
+typedef struct VkDeviceGroupSwapchainCreateInfoKHX {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDeviceGroupPresentModeFlagsKHX    modes;
+} VkDeviceGroupSwapchainCreateInfoKHX;
+
+
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHX)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHX* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHX)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHX* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHX)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHX)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHX)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHX* pModes);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHX)(VkDevice device, const VkAcquireNextImageInfoKHX* pAcquireInfo, uint32_t* pImageIndex);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHX)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHX)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHX(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlagsKHX*                pPeerMemoryFeatures);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHX(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfoKHX*            pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHX(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfoKHX*             pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHX(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHX(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHX*        pDeviceGroupPresentCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHX(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHX*           pModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHX(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHX*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHX(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHX(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+#endif
+
+#define VK_EXT_validation_flags 1
+#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1
+#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags"
+
+
+typedef enum VkValidationCheckEXT {
+    VK_VALIDATION_CHECK_ALL_EXT = 0,
+    VK_VALIDATION_CHECK_SHADERS_EXT = 1,
+    VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT,
+    VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT,
+    VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1),
+    VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCheckEXT;
+
+typedef struct VkValidationFlagsEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 disabledValidationCheckCount;
+    VkValidationCheckEXT*    pDisabledValidationChecks;
+} VkValidationFlagsEXT;
+
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+#define VK_NN_vi_surface 1
+#define VK_NN_VI_SURFACE_SPEC_VERSION     1
+#define VK_NN_VI_SURFACE_EXTENSION_NAME   "VK_NN_vi_surface"
+
+typedef VkFlags VkViSurfaceCreateFlagsNN;
+
+typedef struct VkViSurfaceCreateInfoNN {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkViSurfaceCreateFlagsNN    flags;
+    void*                       window;
+} VkViSurfaceCreateInfoNN;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+#endif /* VK_USE_PLATFORM_VI_NN */
+
+#define VK_EXT_shader_subgroup_ballot 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot"
+
+
+#define VK_EXT_shader_subgroup_vote 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote"
+
+
+#define VK_KHX_device_group_creation 1
+#define VK_KHX_DEVICE_GROUP_CREATION_SPEC_VERSION 1
+#define VK_KHX_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHX_device_group_creation"
+
+typedef struct VkPhysicalDeviceGroupPropertiesKHX {
+    VkStructureType     sType;
+    void*               pNext;
+    uint32_t            physicalDeviceCount;
+    VkPhysicalDevice    physicalDevices[VK_MAX_DEVICE_GROUP_SIZE_KHX];
+    VkBool32            subsetAllocation;
+} VkPhysicalDeviceGroupPropertiesKHX;
+
+typedef struct VkDeviceGroupDeviceCreateInfoKHX {
+    VkStructureType            sType;
+    const void*                pNext;
+    uint32_t                   physicalDeviceCount;
+    const VkPhysicalDevice*    pPhysicalDevices;
+} VkDeviceGroupDeviceCreateInfoKHX;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHX)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHX(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupPropertiesKHX*         pPhysicalDeviceGroupProperties);
+#endif
+
+#define VK_NVX_device_generated_commands 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX)
+
+#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1
+#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands"
+
+
+typedef enum VkIndirectCommandsTokenTypeNVX {
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1),
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkIndirectCommandsTokenTypeNVX;
+
+typedef enum VkObjectEntryTypeNVX {
+    VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0,
+    VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1,
+    VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2,
+    VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3,
+    VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4,
+    VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
+    VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX,
+    VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1),
+    VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkObjectEntryTypeNVX;
+
+
+typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX {
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkIndirectCommandsLayoutUsageFlagBitsNVX;
+typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX;
+
+typedef enum VkObjectEntryUsageFlagBitsNVX {
+    VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001,
+    VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002,
+    VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkObjectEntryUsageFlagBitsNVX;
+typedef VkFlags VkObjectEntryUsageFlagsNVX;
+
+typedef struct VkDeviceGeneratedCommandsFeaturesNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           computeBindingPointSupport;
+} VkDeviceGeneratedCommandsFeaturesNVX;
+
+typedef struct VkDeviceGeneratedCommandsLimitsNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxIndirectCommandsLayoutTokenCount;
+    uint32_t           maxObjectEntryCounts;
+    uint32_t           minSequenceCountBufferOffsetAlignment;
+    uint32_t           minSequenceIndexBufferOffsetAlignment;
+    uint32_t           minCommandsTokenBufferOffsetAlignment;
+} VkDeviceGeneratedCommandsLimitsNVX;
+
+typedef struct VkIndirectCommandsTokenNVX {
+    VkIndirectCommandsTokenTypeNVX    tokenType;
+    VkBuffer                          buffer;
+    VkDeviceSize                      offset;
+} VkIndirectCommandsTokenNVX;
+
+typedef struct VkIndirectCommandsLayoutTokenNVX {
+    VkIndirectCommandsTokenTypeNVX    tokenType;
+    uint32_t                          bindingUnit;
+    uint32_t                          dynamicCount;
+    uint32_t                          divisor;
+} VkIndirectCommandsLayoutTokenNVX;
+
+typedef struct VkIndirectCommandsLayoutCreateInfoNVX {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineBindPoint                        pipelineBindPoint;
+    VkIndirectCommandsLayoutUsageFlagsNVX      flags;
+    uint32_t                                   tokenCount;
+    const VkIndirectCommandsLayoutTokenNVX*    pTokens;
+} VkIndirectCommandsLayoutCreateInfoNVX;
+
+typedef struct VkCmdProcessCommandsInfoNVX {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkObjectTableNVX                     objectTable;
+    VkIndirectCommandsLayoutNVX          indirectCommandsLayout;
+    uint32_t                             indirectCommandsTokenCount;
+    const VkIndirectCommandsTokenNVX*    pIndirectCommandsTokens;
+    uint32_t                             maxSequencesCount;
+    VkCommandBuffer                      targetCommandBuffer;
+    VkBuffer                             sequencesCountBuffer;
+    VkDeviceSize                         sequencesCountOffset;
+    VkBuffer                             sequencesIndexBuffer;
+    VkDeviceSize                         sequencesIndexOffset;
+} VkCmdProcessCommandsInfoNVX;
+
+typedef struct VkCmdReserveSpaceForCommandsInfoNVX {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkObjectTableNVX               objectTable;
+    VkIndirectCommandsLayoutNVX    indirectCommandsLayout;
+    uint32_t                       maxSequencesCount;
+} VkCmdReserveSpaceForCommandsInfoNVX;
+
+typedef struct VkObjectTableCreateInfoNVX {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    uint32_t                             objectCount;
+    const VkObjectEntryTypeNVX*          pObjectEntryTypes;
+    const uint32_t*                      pObjectEntryCounts;
+    const VkObjectEntryUsageFlagsNVX*    pObjectEntryUsageFlags;
+    uint32_t                             maxUniformBuffersPerDescriptor;
+    uint32_t                             maxStorageBuffersPerDescriptor;
+    uint32_t                             maxStorageImagesPerDescriptor;
+    uint32_t                             maxSampledImagesPerDescriptor;
+    uint32_t                             maxPipelineLayouts;
+} VkObjectTableCreateInfoNVX;
+
+typedef struct VkObjectTableEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+} VkObjectTableEntryNVX;
+
+typedef struct VkObjectTablePipelineEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipeline                    pipeline;
+} VkObjectTablePipelineEntryNVX;
+
+typedef struct VkObjectTableDescriptorSetEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipelineLayout              pipelineLayout;
+    VkDescriptorSet               descriptorSet;
+} VkObjectTableDescriptorSetEntryNVX;
+
+typedef struct VkObjectTableVertexBufferEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkBuffer                      buffer;
+} VkObjectTableVertexBufferEntryNVX;
+
+typedef struct VkObjectTableIndexBufferEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkBuffer                      buffer;
+    VkIndexType                   indexType;
+} VkObjectTableIndexBufferEntryNVX;
+
+typedef struct VkObjectTablePushConstantEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipelineLayout              pipelineLayout;
+    VkShaderStageFlags            stageFlags;
+} VkObjectTablePushConstantEntryNVX;
+
+
+typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable);
+typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices);
+typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits);
+#endif
+
+#define VK_NV_clip_space_w_scaling 1
+#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1
+#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling"
+
+typedef struct VkViewportWScalingNV {
+    float    xcoeff;
+    float    ycoeff;
+} VkViewportWScalingNV;
+
+typedef struct VkPipelineViewportWScalingStateCreateInfoNV {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkBool32                       viewportWScalingEnable;
+    uint32_t                       viewportCount;
+    const VkViewportWScalingNV*    pViewportWScalings;
+} VkPipelineViewportWScalingStateCreateInfoNV;
+
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+#endif
+
+#define VK_EXT_direct_mode_display 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display"
+
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#define VK_EXT_acquire_xlib_display 1
+#include <X11/extensions/Xrandr.h>
+
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"
+
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay);
+#endif
+#endif /* VK_USE_PLATFORM_XLIB_XRANDR_EXT */
+
+#define VK_EXT_display_surface_counter 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter"
+
+
+typedef enum VkSurfaceCounterFlagBitsEXT {
+    VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001,
+    VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSurfaceCounterFlagBitsEXT;
+typedef VkFlags VkSurfaceCounterFlagsEXT;
+
+typedef struct VkSurfaceCapabilities2EXT {
+    VkStructureType                  sType;
+    void*                            pNext;
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+    VkSurfaceCounterFlagsEXT         supportedSurfaceCounters;
+} VkSurfaceCapabilities2EXT;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+#endif
+
+#define VK_EXT_display_control 1
+#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control"
+
+
+typedef enum VkDisplayPowerStateEXT {
+    VK_DISPLAY_POWER_STATE_OFF_EXT = 0,
+    VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1,
+    VK_DISPLAY_POWER_STATE_ON_EXT = 2,
+    VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT,
+    VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1),
+    VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayPowerStateEXT;
+
+typedef enum VkDeviceEventTypeEXT {
+    VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0,
+    VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT,
+    VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT,
+    VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1),
+    VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceEventTypeEXT;
+
+typedef enum VkDisplayEventTypeEXT {
+    VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0,
+    VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT,
+    VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT,
+    VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1),
+    VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayEventTypeEXT;
+
+typedef struct VkDisplayPowerInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkDisplayPowerStateEXT    powerState;
+} VkDisplayPowerInfoEXT;
+
+typedef struct VkDeviceEventInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkDeviceEventTypeEXT    deviceEvent;
+} VkDeviceEventInfoEXT;
+
+typedef struct VkDisplayEventInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkDisplayEventTypeEXT    displayEvent;
+} VkDisplayEventInfoEXT;
+
+typedef struct VkSwapchainCounterCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSurfaceCounterFlagsEXT    surfaceCounters;
+} VkSwapchainCounterCreateInfoEXT;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+#endif
+
+#define VK_GOOGLE_display_timing 1
+#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1
+#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing"
+
+typedef struct VkRefreshCycleDurationGOOGLE {
+    uint64_t    refreshDuration;
+} VkRefreshCycleDurationGOOGLE;
+
+typedef struct VkPastPresentationTimingGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+    uint64_t    actualPresentTime;
+    uint64_t    earliestPresentTime;
+    uint64_t    presentMargin;
+} VkPastPresentationTimingGOOGLE;
+
+typedef struct VkPresentTimeGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+} VkPresentTimeGOOGLE;
+
+typedef struct VkPresentTimesInfoGOOGLE {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint32_t                      swapchainCount;
+    const VkPresentTimeGOOGLE*    pTimes;
+} VkPresentTimesInfoGOOGLE;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+#endif
+
+#define VK_NV_sample_mask_override_coverage 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage"
+
+
+#define VK_NV_geometry_shader_passthrough 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough"
+
+
+#define VK_NV_viewport_array2 1
+#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2"
+
+
+#define VK_NVX_multiview_per_view_attributes 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes"
+
+typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           perViewPositionAllComponents;
+} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+
+
+#define VK_NV_viewport_swizzle 1
+#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle"
+
+
+typedef enum VkViewportCoordinateSwizzleNV {
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1),
+    VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkViewportCoordinateSwizzleNV;
+
+typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV;
+
+typedef struct VkViewportSwizzleNV {
+    VkViewportCoordinateSwizzleNV    x;
+    VkViewportCoordinateSwizzleNV    y;
+    VkViewportCoordinateSwizzleNV    z;
+    VkViewportCoordinateSwizzleNV    w;
+} VkViewportSwizzleNV;
+
+typedef struct VkPipelineViewportSwizzleStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineViewportSwizzleStateCreateFlagsNV    flags;
+    uint32_t                                       viewportCount;
+    const VkViewportSwizzleNV*                     pViewportSwizzles;
+} VkPipelineViewportSwizzleStateCreateInfoNV;
+
+
+
+#define VK_EXT_discard_rectangles 1
+#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1
+#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles"
+
+
+typedef enum VkDiscardRectangleModeEXT {
+    VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0,
+    VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1,
+    VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT,
+    VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1),
+    VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDiscardRectangleModeEXT;
+
+typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT;
+
+typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDiscardRectangles;
+} VkPhysicalDeviceDiscardRectanglePropertiesEXT;
+
+typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineDiscardRectangleStateCreateFlagsEXT    flags;
+    VkDiscardRectangleModeEXT                        discardRectangleMode;
+    uint32_t                                         discardRectangleCount;
+    const VkRect2D*                                  pDiscardRectangles;
+} VkPipelineDiscardRectangleStateCreateInfoEXT;
+
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+#endif
+
+#define VK_EXT_swapchain_colorspace 1
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace"
+
+
+#define VK_EXT_hdr_metadata 1
+#define VK_EXT_HDR_METADATA_SPEC_VERSION  1
+#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata"
+
+typedef struct VkXYColorEXT {
+    float    x;
+    float    y;
+} VkXYColorEXT;
+
+typedef struct VkHdrMetadataEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkXYColorEXT       displayPrimaryRed;
+    VkXYColorEXT       displayPrimaryGreen;
+    VkXYColorEXT       displayPrimaryBlue;
+    VkXYColorEXT       whitePoint;
+    float              maxLuminance;
+    float              minLuminance;
+    float              maxContentLightLevel;
+    float              maxFrameAverageLightLevel;
+} VkHdrMetadataEXT;
+
+
+typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+#endif
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+#define VK_MVK_ios_surface 1
+#define VK_MVK_IOS_SURFACE_SPEC_VERSION   2
+#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface"
+
+typedef VkFlags VkIOSSurfaceCreateFlagsMVK;
+
+typedef struct VkIOSSurfaceCreateInfoMVK {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkIOSSurfaceCreateFlagsMVK    flags;
+    const void*                   pView;
+} VkIOSSurfaceCreateInfoMVK;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+#endif /* VK_USE_PLATFORM_IOS_MVK */
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+#define VK_MVK_macos_surface 1
+#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2
+#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface"
+
+typedef VkFlags VkMacOSSurfaceCreateFlagsMVK;
+
+typedef struct VkMacOSSurfaceCreateInfoMVK {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkMacOSSurfaceCreateFlagsMVK    flags;
+    const void*                     pView;
+} VkMacOSSurfaceCreateInfoMVK;
+
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+#endif /* VK_USE_PLATFORM_MACOS_MVK */
+
+#define VK_EXT_sampler_filter_minmax 1
+#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1
+#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax"
+
+
+typedef enum VkSamplerReductionModeEXT {
+    VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0,
+    VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1,
+    VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2,
+    VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+    VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT,
+    VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1),
+    VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSamplerReductionModeEXT;
+
+typedef struct VkSamplerReductionModeCreateInfoEXT {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkSamplerReductionModeEXT    reductionMode;
+} VkSamplerReductionModeCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterMinmaxSingleComponentFormats;
+    VkBool32           filterMinmaxImageComponentMapping;
+} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+
+
+
+#define VK_AMD_gpu_shader_int16 1
+#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1
+#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16"
+
+
+#define VK_EXT_blend_operation_advanced 1
+#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2
+#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced"
+
+
+typedef enum VkBlendOverlapEXT {
+    VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0,
+    VK_BLEND_OVERLAP_DISJOINT_EXT = 1,
+    VK_BLEND_OVERLAP_CONJOINT_EXT = 2,
+    VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT,
+    VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1),
+    VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBlendOverlapEXT;
+
+typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           advancedBlendCoherentOperations;
+} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           advancedBlendMaxColorAttachments;
+    VkBool32           advancedBlendIndependentBlend;
+    VkBool32           advancedBlendNonPremultipliedSrcColor;
+    VkBool32           advancedBlendNonPremultipliedDstColor;
+    VkBool32           advancedBlendCorrelatedOverlap;
+    VkBool32           advancedBlendAllOperations;
+} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkBool32             srcPremultiplied;
+    VkBool32             dstPremultiplied;
+    VkBlendOverlapEXT    blendOverlap;
+} VkPipelineColorBlendAdvancedStateCreateInfoEXT;
+
+
+
+#define VK_NV_fragment_coverage_to_color 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color"
+
+typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV;
+
+typedef struct VkPipelineCoverageToColorStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineCoverageToColorStateCreateFlagsNV    flags;
+    VkBool32                                       coverageToColorEnable;
+    uint32_t                                       coverageToColorLocation;
+} VkPipelineCoverageToColorStateCreateInfoNV;
+
+
+
+#define VK_NV_framebuffer_mixed_samples 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples"
+
+
+typedef enum VkCoverageModulationModeNV {
+    VK_COVERAGE_MODULATION_MODE_NONE_NV = 0,
+    VK_COVERAGE_MODULATION_MODE_RGB_NV = 1,
+    VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2,
+    VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3,
+    VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV,
+    VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1),
+    VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageModulationModeNV;
+
+typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV;
+
+typedef struct VkPipelineCoverageModulationStateCreateInfoNV {
+    VkStructureType                                   sType;
+    const void*                                       pNext;
+    VkPipelineCoverageModulationStateCreateFlagsNV    flags;
+    VkCoverageModulationModeNV                        coverageModulationMode;
+    VkBool32                                          coverageModulationTableEnable;
+    uint32_t                                          coverageModulationTableCount;
+    const float*                                      pCoverageModulationTable;
+} VkPipelineCoverageModulationStateCreateInfoNV;
+
+
+
+#define VK_NV_fill_rectangle 1
+#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1
+#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle"
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
new file mode 100644
index 000000000..191307f2d
--- /dev/null
+++ b/include/vulkan/vulkan.hpp
@@ -0,0 +1,32807 @@
+// Copyright (c) 2015-2017 The Khronos Group Inc.
+// 
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// 
+//     http://www.apache.org/licenses/LICENSE-2.0
+// 
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP
+#define VULKAN_HPP
+
+#include <algorithm>
+#include <array>
+#include <cassert>
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <initializer_list>
+#include <string>
+#include <system_error>
+#include <tuple>
+#include <type_traits>
+#include <vulkan/vulkan.h>
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# include <memory>
+# include <vector>
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+static_assert( VK_HEADER_VERSION ==  55 , "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#  define VULKAN_HPP_TYPESAFE_CONVERSION
+# endif
+#endif
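+// Example (illustrative sketch, not taken from the generated registry output):
+// on a 32-bit build the type-safe handle conversions stay disabled unless the
+// macro is defined before this header is included, e.g.
+//
+//   #define VULKAN_HPP_TYPESAFE_CONVERSION
+//   #include <vulkan/vulkan.hpp>
+//
+// or equivalently via -DVULKAN_HPP_TYPESAFE_CONVERSION on the compiler command line.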
+
+#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
+# if defined(__clang__)
+#  if __has_feature(cxx_unrestricted_unions)
+#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#  endif
+# elif defined(__GNUC__)
+#  define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
+#  if 40600 <= GCC_VERSION
+#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#  endif
+# elif defined(_MSC_VER)
+#  if 1900 <= _MSC_VER
+#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#  endif
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_INLINE)
+# if defined(__clang__)
+#  if __has_attribute(always_inline)
+#   define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+#  else
+#   define VULKAN_HPP_INLINE inline
+#  endif
+# elif defined(__GNUC__)
+#  define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# elif defined(_MSC_VER)
+#  define VULKAN_HPP_INLINE __forceinline
+# else
+#  define VULKAN_HPP_INLINE inline
+# endif
+#endif
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+# define VULKAN_HPP_TYPESAFE_EXPLICIT
+#else
+# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
+#endif
+
+namespace vk
+{
+
+  template <typename FlagBitsType> struct FlagTraits
+  {
+    enum { allFlags = 0 };
+  };
+
+  template <typename BitType, typename MaskType = VkFlags>
+  class Flags
+  {
+  public:
+    Flags()
+      : m_mask(0)
+    {
+    }
+
+    Flags(BitType bit)
+      : m_mask(static_cast<MaskType>(bit))
+    {
+    }
+
+    Flags(Flags<BitType> const& rhs)
+      : m_mask(rhs.m_mask)
+    {
+    }
+
+    Flags<BitType> & operator=(Flags<BitType> const& rhs)
+    {
+      m_mask = rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> & operator|=(Flags<BitType> const& rhs)
+    {
+      m_mask |= rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> & operator&=(Flags<BitType> const& rhs)
+    {
+      m_mask &= rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> & operator^=(Flags<BitType> const& rhs)
+    {
+      m_mask ^= rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> operator|(Flags<BitType> const& rhs) const
+    {
+      Flags<BitType> result(*this);
+      result |= rhs;
+      return result;
+    }
+
+    Flags<BitType> operator&(Flags<BitType> const& rhs) const
+    {
+      Flags<BitType> result(*this);
+      result &= rhs;
+      return result;
+    }
+
+    Flags<BitType> operator^(Flags<BitType> const& rhs) const
+    {
+      Flags<BitType> result(*this);
+      result ^= rhs;
+      return result;
+    }
+
+    bool operator!() const
+    {
+      return !m_mask;
+    }
+
+    Flags<BitType> operator~() const
+    {
+      Flags<BitType> result(*this);
+      result.m_mask ^= FlagTraits<BitType>::allFlags;
+      return result;
+    }
+
+    bool operator==(Flags<BitType> const& rhs) const
+    {
+      return m_mask == rhs.m_mask;
+    }
+
+    bool operator!=(Flags<BitType> const& rhs) const
+    {
+      return m_mask != rhs.m_mask;
+    }
+
+    explicit operator bool() const
+    {
+      return !!m_mask;
+    }
+
+    explicit operator MaskType() const
+    {
+        return m_mask;
+    }
+
+  private:
+    MaskType  m_mask;
+  };
+
+  template <typename BitType>
+  Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags)
+  {
+    return flags | bit;
+  }
+
+  template <typename BitType>
+  Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags)
+  {
+    return flags & bit;
+  }
+
+  template <typename BitType>
+  Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags)
+  {
+    return flags ^ bit;
+  }
+
+
+  template <typename RefType>
+  class Optional
+  {
+  public:
+    Optional(RefType & reference) { m_ptr = &reference; }
+    Optional(RefType * ptr) { m_ptr = ptr; }
+    Optional(std::nullptr_t) { m_ptr = nullptr; }
+
+    operator RefType*() const { return m_ptr; }
+    RefType const* operator->() const { return m_ptr; }
+    explicit operator bool() const { return !!m_ptr; }
+
+  private:
+    RefType *m_ptr;
+  };
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T>
+  class ArrayProxy
+  {
+  public:
+    ArrayProxy(std::nullptr_t)
+      : m_count(0)
+      , m_ptr(nullptr)
+    {}
+
+    ArrayProxy(T & ptr)
+      : m_count(1)
+      , m_ptr(&ptr)
+    {}
+
+    ArrayProxy(uint32_t count, T * ptr)
+      : m_count(count)
+      , m_ptr(ptr)
+    {}
+
+    template <size_t N>
+    ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data)
+      : m_count(N)
+      , m_ptr(data.data())
+    {}
+
+    template <size_t N>
+    ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data)
+      : m_count(N)
+      , m_ptr(data.data())
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data)
+      : m_count(static_cast<uint32_t>(data.size()))
+      , m_ptr(data.data())
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data)
+      : m_count(static_cast<uint32_t>(data.size()))
+      , m_ptr(data.data())
+    {}
+
+    ArrayProxy(std::initializer_list<T> const& data)
+      : m_count(static_cast<uint32_t>(data.end() - data.begin()))
+      , m_ptr(data.begin())
+    {}
+
+    const T * begin() const
+    {
+      return m_ptr;
+    }
+
+    const T * end() const
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const
+    {
+      assert(m_count && m_ptr);
+      return *m_ptr;
+    }
+
+    const T & back() const
+    {
+      assert(m_count && m_ptr);
+      return *(m_ptr + m_count - 1);
+    }
+
+    bool empty() const
+    {
+      return (m_count == 0);
+    }
+
+    uint32_t size() const
+    {
+      return m_count;
+    }
+
+    T * data() const
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t  m_count;
+    T *       m_ptr;
+  };
+#endif
+
+#if defined(VULKAN_HPP_NO_EXCEPTIONS) && !defined(VULKAN_HPP_NO_SMART_HANDLE)
+#  define VULKAN_HPP_NO_SMART_HANDLE
+#endif
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Type, typename Deleter>
+  class UniqueHandle
+  {
+  public:
+    explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() )
+      : m_value( value )
+      , m_deleter( deleter )
+    {}
+
+    UniqueHandle( UniqueHandle const& ) = delete;
+
+    UniqueHandle( UniqueHandle && other )
+      : m_value( other.release() )
+      , m_deleter( std::move( other.m_deleter ) )
+    {}
+
+    ~UniqueHandle()
+    {
+      destroy();
+    }
+
+    UniqueHandle & operator=( UniqueHandle const& ) = delete;
+
+    UniqueHandle & operator=( UniqueHandle && other )
+    {
+      reset( other.release() );
+      m_deleter = std::move( other.m_deleter );
+      return *this;
+    }
+
+    explicit operator bool() const
+    {
+      return m_value.operator bool();
+    }
+
+    Type const* operator->() const
+    {
+      return &m_value;
+    }
+
+    Type const& operator*() const
+    {
+      return m_value;
+    }
+
+    Type get() const
+    {
+      return m_value;
+    }
+
+    Deleter & getDeleter()
+    {
+      return m_deleter;
+    }
+
+    Deleter const& getDeleter() const
+    {
+      return m_deleter;
+    }
+
+    void reset( Type const& value = Type() )
+    {
+      if ( m_value != value )
+      {
+        destroy();
+        m_value = value;
+      }
+    }
+
+    Type release()
+    {
+      Type value = m_value;
+      m_value = nullptr;
+      return value;
+    }
+
+    void swap( UniqueHandle<Type, Deleter> & rhs )
+    {
+      std::swap(m_value, rhs.m_value);
+      std::swap(m_deleter, rhs.m_deleter);
+    }
+
+  private:
+    void destroy()
+    {
+      if ( m_value )
+      {
+        m_deleter( m_value );
+      }
+    }
+
+  private:
+    Type    m_value;
+    Deleter m_deleter;
+  };
+
+  template <typename Type, typename Deleter>
+  VULKAN_HPP_INLINE void swap( UniqueHandle<Type,Deleter> & lhs, UniqueHandle<Type,Deleter> & rhs )
+  {
+    lhs.swap( rhs );
+  }
+#endif
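+
+  // UniqueHandle mirrors std::unique_ptr for Vulkan handles: it is move-only, owns a
+  // handle value, and invokes its Deleter on destruction or reset(). A hypothetical use,
+  // assuming the createXxxUnique factory functions that vulkan.hpp declares further down:
+  //
+  //   vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );
+  //   // the buffer is destroyed automatically when it goes out of scope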
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string(Result value)
+  {
+    switch (value)
+    {
+    case Result::eSuccess: return "Success";
+    case Result::eNotReady: return "NotReady";
+    case Result::eTimeout: return "Timeout";
+    case Result::eEventSet: return "EventSet";
+    case Result::eEventReset: return "EventReset";
+    case Result::eIncomplete: return "Incomplete";
+    case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
+    case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
+    case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
+    case Result::eErrorDeviceLost: return "ErrorDeviceLost";
+    case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
+    case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
+    case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
+    case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
+    case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
+    case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
+    case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
+    case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
+    case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
+    case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
+    case Result::eSuboptimalKHR: return "SuboptimalKHR";
+    case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
+    case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
+    case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
+    case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
+    case Result::eErrorOutOfPoolMemoryKHR: return "ErrorOutOfPoolMemoryKHR";
+    case Result::eErrorInvalidExternalHandleKHR: return "ErrorInvalidExternalHandleKHR";
+    default: return "invalid";
+    }
+  }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# define noexcept _NOEXCEPT
+#endif
+
+  class ErrorCategoryImpl : public std::error_category
+  {
+    public:
+    virtual const char* name() const noexcept override { return "vk::Result"; }
+    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
+  };
+
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# undef noexcept
+#endif
+
+  VULKAN_HPP_INLINE const std::error_category& errorCategory()
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
+  {
+    return std::error_code(static_cast<int>(e), errorCategory());
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
+  {
+    return std::error_condition(static_cast<int>(e), errorCategory());
+  }
+
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# define noexcept _NOEXCEPT
+#endif
+
+  class Error
+  {
+    public:
+    virtual ~Error() = default;
+
+    virtual const char* what() const noexcept = 0;
+  };
+
+  class LogicError : public Error, public std::logic_error
+  {
+    public:
+    explicit LogicError( const std::string& what )
+      : Error(), std::logic_error(what) {}
+    explicit LogicError( char const * what )
+      : Error(), std::logic_error(what) {}
+    virtual ~LogicError() = default;
+
+    virtual const char* what() const noexcept { return std::logic_error::what(); }
+  };
+
+  class SystemError : public Error, public std::system_error
+  {
+    public:
+    SystemError( std::error_code ec )
+      : Error(), std::system_error(ec) {}
+    SystemError( std::error_code ec, std::string const& what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( std::error_code ec, char const * what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( int ev, std::error_category const& ecat )
+      : Error(), std::system_error(ev, ecat) {}
+    SystemError( int ev, std::error_category const& ecat, std::string const& what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    SystemError( int ev, std::error_category const& ecat, char const * what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    virtual ~SystemError() = default;
+
+    virtual const char* what() const noexcept { return std::system_error::what(); }
+  };
+
+#if defined(_MSC_VER) && (_MSC_VER == 1800)
+# undef noexcept
+#endif
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+    OutOfHostMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+    OutOfDeviceMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+    InitializationFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+    DeviceLostError( char const * message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+    MemoryMapFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+    LayerNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+    ExtensionNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+    FeatureNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+  class IncompatibleDriverError : public SystemError
+  {
+  public:
+    IncompatibleDriverError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+    IncompatibleDriverError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+  };
+  class TooManyObjectsError : public SystemError
+  {
+  public:
+    TooManyObjectsError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+    TooManyObjectsError( char const * message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+  };
+  class FormatNotSupportedError : public SystemError
+  {
+  public:
+    FormatNotSupportedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+    FormatNotSupportedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+  };
+  class FragmentedPoolError : public SystemError
+  {
+  public:
+    FragmentedPoolError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+    FragmentedPoolError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+  };
+  class SurfaceLostKHRError : public SystemError
+  {
+  public:
+    SurfaceLostKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+    SurfaceLostKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+  };
+  class NativeWindowInUseKHRError : public SystemError
+  {
+  public:
+    NativeWindowInUseKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+    NativeWindowInUseKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+  };
+  class OutOfDateKHRError : public SystemError
+  {
+  public:
+    OutOfDateKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+    OutOfDateKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+  };
+  class IncompatibleDisplayKHRError : public SystemError
+  {
+  public:
+    IncompatibleDisplayKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+    IncompatibleDisplayKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+  };
+  class ValidationFailedEXTError : public SystemError
+  {
+  public:
+    ValidationFailedEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+    ValidationFailedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+  };
+  class InvalidShaderNVError : public SystemError
+  {
+  public:
+    InvalidShaderNVError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+    InvalidShaderNVError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+  };
+  class OutOfPoolMemoryKHRError : public SystemError
+  {
+  public:
+    OutOfPoolMemoryKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemoryKHR ), message ) {}
+    OutOfPoolMemoryKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemoryKHR ), message ) {}
+  };
+  class InvalidExternalHandleKHRError : public SystemError
+  {
+  public:
+    InvalidExternalHandleKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandleKHR ), message ) {}
+    InvalidExternalHandleKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandleKHR ), message ) {}
+  };
+
+  VULKAN_HPP_INLINE void throwResultException( Result result, char const * message )
+  {
+    assert ( static_cast<long long int>(result) < 0 );
+    switch ( result )
+    {
+    case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError ( message );
+    case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError ( message );
+    case Result::eErrorInitializationFailed: throw InitializationFailedError ( message );
+    case Result::eErrorDeviceLost: throw DeviceLostError ( message );
+    case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError ( message );
+    case Result::eErrorLayerNotPresent: throw LayerNotPresentError ( message );
+    case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError ( message );
+    case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError ( message );
+    case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError ( message );
+    case Result::eErrorTooManyObjects: throw TooManyObjectsError ( message );
+    case Result::eErrorFormatNotSupported: throw FormatNotSupportedError ( message );
+    case Result::eErrorFragmentedPool: throw FragmentedPoolError ( message );
+    case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError ( message );
+    case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError ( message );
+    case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError ( message );
+    case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError ( message );
+    case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError ( message );
+    case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError ( message );
+    case Result::eErrorOutOfPoolMemoryKHR: throw OutOfPoolMemoryKHRError ( message );
+    case Result::eErrorInvalidExternalHandleKHR: throw InvalidExternalHandleKHRError ( message );
+    default: throw SystemError( make_error_code( result ) );
+    }
+  }
+#endif
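+
+  // Each negative Result has a dedicated exception type derived from SystemError (and
+  // hence from vk::Error), and throwResultException dispatches on the code; the assert
+  // documents that success and non-error status codes never reach it. Callers may catch
+  // a specific error such as vk::OutOfDateKHRError, or catch vk::SystemError to handle
+  // any Vulkan error uniformly.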
+} // namespace vk
+
+namespace std
+{
+  template <>
+  struct is_error_code_enum<vk::Result> : public true_type
+  {};
+}
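+// Specializing std::is_error_code_enum lets vk::Result convert implicitly to
+// std::error_code through the make_error_code overload found by argument-dependent
+// lookup, so Results interoperate with the standard <system_error> machinery, e.g.
+//
+//   std::error_code ec = vk::Result::eErrorDeviceLost;   // ec.message() == "ErrorDeviceLost"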
+
+namespace vk
+{
+
+  template <typename T>
+  struct ResultValue
+  {
+    ResultValue( Result r, T & v )
+      : result( r )
+      , value( v )
+    {}
+
+    Result  result;
+    T       value;
+
+    operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
+  };
+
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T>  type;
+#else
+    typedef T              type;
+#endif
+  };
+
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void   type;
+#endif
+  };
+
+  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( result == Result::eSuccess );
+    return result;
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( result == Result::eSuccess );
+    return ResultValue<T>( result, data );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return data;
+#endif
+  }
+
+  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return result;
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return ResultValue<T>( result, data );
+  }
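+
+  // The createResultValue overloads above centralize result handling for the enhanced API:
+  // with exceptions enabled they throw via throwResultException when the Result is not in
+  // the success set and return only the payload; with VULKAN_HPP_NO_EXCEPTIONS defined they
+  // assert and return the Result (or a ResultValue pairing it with the payload). A
+  // hypothetical wrapper following that pattern:
+  //
+  //   vk::Fence fence;
+  //   Result result = static_cast<Result>( vkCreateFence( device, &createInfo, nullptr,
+  //                                                        reinterpret_cast<VkFence*>( &fence ) ) );
+  //   return createResultValue( result, fence, "vk::Device::createFence" );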
+
+  using SampleMask = uint32_t;
+
+  using Bool32 = uint32_t;
+
+  using DeviceSize = uint64_t;
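+
+  // The *FlagBits enumerations that follow are intentionally empty: the Vulkan
+  // specification reserves these flags fields for future use and currently defines no
+  // bits for them, but the empty enum still lets the corresponding type-safe Flags<>
+  // alias be declared. A default-constructed value such as vk::FramebufferCreateFlags()
+  // converts to a zero mask.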
+
+  enum class FramebufferCreateFlagBits
+  {
+  };
+
+  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
+
+  enum class QueryPoolCreateFlagBits
+  {
+  };
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
+
+  enum class RenderPassCreateFlagBits
+  {
+  };
+
+  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
+
+  enum class SamplerCreateFlagBits
+  {
+  };
+
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
+
+  enum class PipelineLayoutCreateFlagBits
+  {
+  };
+
+  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
+
+  enum class PipelineCacheCreateFlagBits
+  {
+  };
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
+
+  enum class PipelineDepthStencilStateCreateFlagBits
+  {
+  };
+
+  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
+
+  enum class PipelineDynamicStateCreateFlagBits
+  {
+  };
+
+  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
+
+  enum class PipelineColorBlendStateCreateFlagBits
+  {
+  };
+
+  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
+
+  enum class PipelineMultisampleStateCreateFlagBits
+  {
+  };
+
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
+
+  enum class PipelineRasterizationStateCreateFlagBits
+  {
+  };
+
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
+
+  enum class PipelineViewportStateCreateFlagBits
+  {
+  };
+
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
+
+  enum class PipelineTessellationStateCreateFlagBits
+  {
+  };
+
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
+
+  enum class PipelineInputAssemblyStateCreateFlagBits
+  {
+  };
+
+  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
+
+  enum class PipelineVertexInputStateCreateFlagBits
+  {
+  };
+
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
+
+  enum class PipelineShaderStageCreateFlagBits
+  {
+  };
+
+  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
+
+  enum class BufferViewCreateFlagBits
+  {
+  };
+
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
+
+  enum class InstanceCreateFlagBits
+  {
+  };
+
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
+
+  enum class DeviceCreateFlagBits
+  {
+  };
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
+
+  enum class DeviceQueueCreateFlagBits
+  {
+  };
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
+
+  enum class ImageViewCreateFlagBits
+  {
+  };
+
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
+
+  enum class SemaphoreCreateFlagBits
+  {
+  };
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
+
+  enum class ShaderModuleCreateFlagBits
+  {
+  };
+
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
+
+  enum class EventCreateFlagBits
+  {
+  };
+
+  using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
+
+  enum class MemoryMapFlagBits
+  {
+  };
+
+  using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
+
+  enum class DescriptorPoolResetFlagBits
+  {
+  };
+
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
+
+  enum class DescriptorUpdateTemplateCreateFlagBitsKHR
+  {
+  };
+
+  using DescriptorUpdateTemplateCreateFlagsKHR = Flags<DescriptorUpdateTemplateCreateFlagBitsKHR, VkDescriptorUpdateTemplateCreateFlagsKHR>;
+
+  enum class DisplayModeCreateFlagBitsKHR
+  {
+  };
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
+
+  enum class DisplaySurfaceCreateFlagBitsKHR
+  {
+  };
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  enum class AndroidSurfaceCreateFlagBitsKHR
+  {
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  enum class MirSurfaceCreateFlagBitsKHR
+  {
+  };
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  enum class ViSurfaceCreateFlagBitsNN
+  {
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  enum class WaylandSurfaceCreateFlagBitsKHR
+  {
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  enum class Win32SurfaceCreateFlagBitsKHR
+  {
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  enum class XlibSurfaceCreateFlagBitsKHR
+  {
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  enum class XcbSurfaceCreateFlagBitsKHR
+  {
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  enum class IOSSurfaceCreateFlagBitsMVK
+  {
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  enum class MacOSSurfaceCreateFlagBitsMVK
+  {
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK, VkMacOSSurfaceCreateFlagsMVK>;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  enum class CommandPoolTrimFlagBitsKHR
+  {
+  };
+
+  using CommandPoolTrimFlagsKHR = Flags<CommandPoolTrimFlagBitsKHR, VkCommandPoolTrimFlagsKHR>;
+
+  enum class PipelineViewportSwizzleStateCreateFlagBitsNV
+  {
+  };
+
+  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV, VkPipelineViewportSwizzleStateCreateFlagsNV>;
+
+  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT
+  {
+  };
+
+  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT, VkPipelineDiscardRectangleStateCreateFlagsEXT>;
+
+  enum class PipelineCoverageToColorStateCreateFlagBitsNV
+  {
+  };
+
+  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV, VkPipelineCoverageToColorStateCreateFlagsNV>;
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV
+  {
+  };
+
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV, VkPipelineCoverageModulationStateCreateFlagsNV>;
+
+  class DeviceMemory
+  {
+  public:
+    DeviceMemory()
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    DeviceMemory( std::nullptr_t )
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory )
+       : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeviceMemory & operator=(VkDeviceMemory deviceMemory)
+    {
+      m_deviceMemory = deviceMemory;
+      return *this; 
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t )
+    {
+      m_deviceMemory = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DeviceMemory const & rhs ) const
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=(DeviceMemory const & rhs ) const
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<(DeviceMemory const & rhs ) const
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory;
+  };
+
+  static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
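+
+  // DeviceMemory establishes the pattern repeated by every handle wrapper below
+  // (CommandPool, Buffer, BufferView, Image, ...): the class holds nothing but the raw
+  // C handle (the static_assert guarantees identical size), is value-comparable,
+  // explicitly convertible back to the C type, and testable against VK_NULL_HANDLE.
+  // None of these wrappers owns or frees the underlying object. A hypothetical round trip:
+  //
+  //   VkBuffer raw = /* obtained from the C API */ VK_NULL_HANDLE;
+  //   vk::Buffer wrapped( raw );                        // wrap without taking ownership
+  //   VkBuffer back = static_cast<VkBuffer>( wrapped ); // unwrap for a raw C call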
+
+  class CommandPool
+  {
+  public:
+    CommandPool()
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    CommandPool( std::nullptr_t )
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool )
+       : m_commandPool( commandPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandPool & operator=(VkCommandPool commandPool)
+    {
+      m_commandPool = commandPool;
+      return *this; 
+    }
+#endif
+
+    CommandPool & operator=( std::nullptr_t )
+    {
+      m_commandPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( CommandPool const & rhs ) const
+    {
+      return m_commandPool == rhs.m_commandPool;
+    }
+
+    bool operator!=(CommandPool const & rhs ) const
+    {
+      return m_commandPool != rhs.m_commandPool;
+    }
+
+    bool operator<(CommandPool const & rhs ) const
+    {
+      return m_commandPool < rhs.m_commandPool;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const
+    {
+      return m_commandPool;
+    }
+
+    explicit operator bool() const
+    {
+      return m_commandPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_commandPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandPool m_commandPool;
+  };
+
+  static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+
+  class Buffer
+  {
+  public:
+    Buffer()
+      : m_buffer(VK_NULL_HANDLE)
+    {}
+
+    Buffer( std::nullptr_t )
+      : m_buffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer )
+       : m_buffer( buffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Buffer & operator=(VkBuffer buffer)
+    {
+      m_buffer = buffer;
+      return *this; 
+    }
+#endif
+
+    Buffer & operator=( std::nullptr_t )
+    {
+      m_buffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Buffer const & rhs ) const
+    {
+      return m_buffer == rhs.m_buffer;
+    }
+
+    bool operator!=(Buffer const & rhs ) const
+    {
+      return m_buffer != rhs.m_buffer;
+    }
+
+    bool operator<(Buffer const & rhs ) const
+    {
+      return m_buffer < rhs.m_buffer;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const
+    {
+      return m_buffer;
+    }
+
+    explicit operator bool() const
+    {
+      return m_buffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_buffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBuffer m_buffer;
+  };
+
+  static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+
+  class BufferView
+  {
+  public:
+    BufferView()
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    BufferView( std::nullptr_t )
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView )
+       : m_bufferView( bufferView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    BufferView & operator=(VkBufferView bufferView)
+    {
+      m_bufferView = bufferView;
+      return *this; 
+    }
+#endif
+
+    BufferView & operator=( std::nullptr_t )
+    {
+      m_bufferView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( BufferView const & rhs ) const
+    {
+      return m_bufferView == rhs.m_bufferView;
+    }
+
+    bool operator!=(BufferView const & rhs ) const
+    {
+      return m_bufferView != rhs.m_bufferView;
+    }
+
+    bool operator<(BufferView const & rhs ) const
+    {
+      return m_bufferView < rhs.m_bufferView;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const
+    {
+      return m_bufferView;
+    }
+
+    explicit operator bool() const
+    {
+      return m_bufferView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_bufferView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBufferView m_bufferView;
+  };
+
+  static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+  class Image
+  {
+  public:
+    Image()
+      : m_image(VK_NULL_HANDLE)
+    {}
+
+    Image( std::nullptr_t )
+      : m_image(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image )
+       : m_image( image )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Image & operator=(VkImage image)
+    {
+      m_image = image;
+      return *this; 
+    }
+#endif
+
+    Image & operator=( std::nullptr_t )
+    {
+      m_image = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Image const & rhs ) const
+    {
+      return m_image == rhs.m_image;
+    }
+
+    bool operator!=(Image const & rhs ) const
+    {
+      return m_image != rhs.m_image;
+    }
+
+    bool operator<(Image const & rhs ) const
+    {
+      return m_image < rhs.m_image;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const
+    {
+      return m_image;
+    }
+
+    explicit operator bool() const
+    {
+      return m_image != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_image == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImage m_image;
+  };
+
+  static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+
+  class ImageView
+  {
+  public:
+    ImageView()
+      : m_imageView(VK_NULL_HANDLE)
+    {}
+
+    ImageView( std::nullptr_t )
+      : m_imageView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView )
+       : m_imageView( imageView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ImageView & operator=(VkImageView imageView)
+    {
+      m_imageView = imageView;
+      return *this; 
+    }
+#endif
+
+    ImageView & operator=( std::nullptr_t )
+    {
+      m_imageView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ImageView const & rhs ) const
+    {
+      return m_imageView == rhs.m_imageView;
+    }
+
+    bool operator!=(ImageView const & rhs ) const
+    {
+      return m_imageView != rhs.m_imageView;
+    }
+
+    bool operator<(ImageView const & rhs ) const
+    {
+      return m_imageView < rhs.m_imageView;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const
+    {
+      return m_imageView;
+    }
+
+    explicit operator bool() const
+    {
+      return m_imageView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_imageView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImageView m_imageView;
+  };
+
+  static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+
+  class ShaderModule
+  {
+  public:
+    ShaderModule()
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    ShaderModule( std::nullptr_t )
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule )
+       : m_shaderModule( shaderModule )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ShaderModule & operator=(VkShaderModule shaderModule)
+    {
+      m_shaderModule = shaderModule;
+      return *this; 
+    }
+#endif
+
+    ShaderModule & operator=( std::nullptr_t )
+    {
+      m_shaderModule = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ShaderModule const & rhs ) const
+    {
+      return m_shaderModule == rhs.m_shaderModule;
+    }
+
+    bool operator!=(ShaderModule const & rhs ) const
+    {
+      return m_shaderModule != rhs.m_shaderModule;
+    }
+
+    bool operator<(ShaderModule const & rhs ) const
+    {
+      return m_shaderModule < rhs.m_shaderModule;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const
+    {
+      return m_shaderModule;
+    }
+
+    explicit operator bool() const
+    {
+      return m_shaderModule != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_shaderModule == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkShaderModule m_shaderModule;
+  };
+
+  static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+
+  class Pipeline
+  {
+  public:
+    Pipeline()
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
+
+    Pipeline( std::nullptr_t )
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline )
+       : m_pipeline( pipeline )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Pipeline & operator=(VkPipeline pipeline)
+    {
+      m_pipeline = pipeline;
+      return *this; 
+    }
+#endif
+
+    Pipeline & operator=( std::nullptr_t )
+    {
+      m_pipeline = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Pipeline const & rhs ) const
+    {
+      return m_pipeline == rhs.m_pipeline;
+    }
+
+    bool operator!=(Pipeline const & rhs ) const
+    {
+      return m_pipeline != rhs.m_pipeline;
+    }
+
+    bool operator<(Pipeline const & rhs ) const
+    {
+      return m_pipeline < rhs.m_pipeline;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const
+    {
+      return m_pipeline;
+    }
+
+    explicit operator bool() const
+    {
+      return m_pipeline != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_pipeline == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipeline m_pipeline;
+  };
+
+  static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+
+  class PipelineLayout
+  {
+  public:
+    PipelineLayout()
+      : m_pipelineLayout(VK_NULL_HANDLE)
+    {}
+
+    PipelineLayout( std::nullptr_t )
+      : m_pipelineLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout )
+       : m_pipelineLayout( pipelineLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineLayout & operator=(VkPipelineLayout pipelineLayout)
+    {
+      m_pipelineLayout = pipelineLayout;
+      return *this; 
+    }
+#endif
+
+    PipelineLayout & operator=( std::nullptr_t )
+    {
+      m_pipelineLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PipelineLayout const & rhs ) const
+    {
+      return m_pipelineLayout == rhs.m_pipelineLayout;
+    }
+
+    bool operator!=(PipelineLayout const & rhs ) const
+    {
+      return m_pipelineLayout != rhs.m_pipelineLayout;
+    }
+
+    bool operator<(PipelineLayout const & rhs ) const
+    {
+      return m_pipelineLayout < rhs.m_pipelineLayout;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const
+    {
+      return m_pipelineLayout;
+    }
+
+    explicit operator bool() const
+    {
+      return m_pipelineLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_pipelineLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineLayout m_pipelineLayout;
+  };
+
+  static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+
+  class Sampler
+  {
+  public:
+    Sampler()
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    Sampler( std::nullptr_t )
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler )
+       : m_sampler( sampler )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Sampler & operator=(VkSampler sampler)
+    {
+      m_sampler = sampler;
+      return *this; 
+    }
+#endif
+
+    Sampler & operator=( std::nullptr_t )
+    {
+      m_sampler = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Sampler const & rhs ) const
+    {
+      return m_sampler == rhs.m_sampler;
+    }
+
+    bool operator!=(Sampler const & rhs ) const
+    {
+      return m_sampler != rhs.m_sampler;
+    }
+
+    bool operator<(Sampler const & rhs ) const
+    {
+      return m_sampler < rhs.m_sampler;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const
+    {
+      return m_sampler;
+    }
+
+    explicit operator bool() const
+    {
+      return m_sampler != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_sampler == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSampler m_sampler;
+  };
+
+  static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+  class DescriptorSet
+  {
+  public:
+    DescriptorSet()
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    DescriptorSet( std::nullptr_t )
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet )
+       : m_descriptorSet( descriptorSet )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSet & operator=(VkDescriptorSet descriptorSet)
+    {
+      m_descriptorSet = descriptorSet;
+      return *this; 
+    }
+#endif
+
+    DescriptorSet & operator=( std::nullptr_t )
+    {
+      m_descriptorSet = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorSet const & rhs ) const
+    {
+      return m_descriptorSet == rhs.m_descriptorSet;
+    }
+
+    bool operator!=(DescriptorSet const & rhs ) const
+    {
+      return m_descriptorSet != rhs.m_descriptorSet;
+    }
+
+    bool operator<(DescriptorSet const & rhs ) const
+    {
+      return m_descriptorSet < rhs.m_descriptorSet;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const
+    {
+      return m_descriptorSet;
+    }
+
+    explicit operator bool() const
+    {
+      return m_descriptorSet != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_descriptorSet == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSet m_descriptorSet;
+  };
+
+  static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+
+  class DescriptorSetLayout
+  {
+  public:
+    DescriptorSetLayout()
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    DescriptorSetLayout( std::nullptr_t )
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout )
+       : m_descriptorSetLayout( descriptorSetLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout)
+    {
+      m_descriptorSetLayout = descriptorSetLayout;
+      return *this; 
+    }
+#endif
+
+    DescriptorSetLayout & operator=( std::nullptr_t )
+    {
+      m_descriptorSetLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorSetLayout const & rhs ) const
+    {
+      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+    }
+
+    bool operator!=(DescriptorSetLayout const & rhs ) const
+    {
+      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+    }
+
+    bool operator<(DescriptorSetLayout const & rhs ) const
+    {
+      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const
+    {
+      return m_descriptorSetLayout;
+    }
+
+    explicit operator bool() const
+    {
+      return m_descriptorSetLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_descriptorSetLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSetLayout m_descriptorSetLayout;
+  };
+
+  static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+
+  class DescriptorPool
+  {
+  public:
+    DescriptorPool()
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    DescriptorPool( std::nullptr_t )
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool )
+       : m_descriptorPool( descriptorPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorPool & operator=(VkDescriptorPool descriptorPool)
+    {
+      m_descriptorPool = descriptorPool;
+      return *this; 
+    }
+#endif
+
+    DescriptorPool & operator=( std::nullptr_t )
+    {
+      m_descriptorPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorPool const & rhs ) const
+    {
+      return m_descriptorPool == rhs.m_descriptorPool;
+    }
+
+    bool operator!=(DescriptorPool const & rhs ) const
+    {
+      return m_descriptorPool != rhs.m_descriptorPool;
+    }
+
+    bool operator<(DescriptorPool const & rhs ) const
+    {
+      return m_descriptorPool < rhs.m_descriptorPool;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const
+    {
+      return m_descriptorPool;
+    }
+
+    explicit operator bool() const
+    {
+      return m_descriptorPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_descriptorPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorPool m_descriptorPool;
+  };
+
+  static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+  class Fence
+  {
+  public:
+    Fence()
+      : m_fence(VK_NULL_HANDLE)
+    {}
+
+    Fence( std::nullptr_t )
+      : m_fence(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence )
+       : m_fence( fence )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Fence & operator=(VkFence fence)
+    {
+      m_fence = fence;
+      return *this; 
+    }
+#endif
+
+    Fence & operator=( std::nullptr_t )
+    {
+      m_fence = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Fence const & rhs ) const
+    {
+      return m_fence == rhs.m_fence;
+    }
+
+    bool operator!=(Fence const & rhs ) const
+    {
+      return m_fence != rhs.m_fence;
+    }
+
+    bool operator<(Fence const & rhs ) const
+    {
+      return m_fence < rhs.m_fence;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const
+    {
+      return m_fence;
+    }
+
+    explicit operator bool() const
+    {
+      return m_fence != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_fence == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFence m_fence;
+  };
+
+  static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+
+  class Semaphore
+  {
+  public:
+    Semaphore()
+      : m_semaphore(VK_NULL_HANDLE)
+    {}
+
+    Semaphore( std::nullptr_t )
+      : m_semaphore(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore )
+       : m_semaphore( semaphore )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Semaphore & operator=(VkSemaphore semaphore)
+    {
+      m_semaphore = semaphore;
+      return *this; 
+    }
+#endif
+
+    Semaphore & operator=( std::nullptr_t )
+    {
+      m_semaphore = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Semaphore const & rhs ) const
+    {
+      return m_semaphore == rhs.m_semaphore;
+    }
+
+    bool operator!=(Semaphore const & rhs ) const
+    {
+      return m_semaphore != rhs.m_semaphore;
+    }
+
+    bool operator<(Semaphore const & rhs ) const
+    {
+      return m_semaphore < rhs.m_semaphore;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const
+    {
+      return m_semaphore;
+    }
+
+    explicit operator bool() const
+    {
+      return m_semaphore != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_semaphore == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSemaphore m_semaphore;
+  };
+
+  static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+
+  class Event
+  {
+  public:
+    Event()
+      : m_event(VK_NULL_HANDLE)
+    {}
+
+    Event( std::nullptr_t )
+      : m_event(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event )
+       : m_event( event )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Event & operator=(VkEvent event)
+    {
+      m_event = event;
+      return *this; 
+    }
+#endif
+
+    Event & operator=( std::nullptr_t )
+    {
+      m_event = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Event const & rhs ) const
+    {
+      return m_event == rhs.m_event;
+    }
+
+    bool operator!=(Event const & rhs ) const
+    {
+      return m_event != rhs.m_event;
+    }
+
+    bool operator<(Event const & rhs ) const
+    {
+      return m_event < rhs.m_event;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const
+    {
+      return m_event;
+    }
+
+    explicit operator bool() const
+    {
+      return m_event != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_event == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkEvent m_event;
+  };
+
+  static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+  class QueryPool
+  {
+  public:
+    QueryPool()
+      : m_queryPool(VK_NULL_HANDLE)
+    {}
+
+    QueryPool( std::nullptr_t )
+      : m_queryPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool )
+       : m_queryPool( queryPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    QueryPool & operator=(VkQueryPool queryPool)
+    {
+      m_queryPool = queryPool;
+      return *this; 
+    }
+#endif
+
+    QueryPool & operator=( std::nullptr_t )
+    {
+      m_queryPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( QueryPool const & rhs ) const
+    {
+      return m_queryPool == rhs.m_queryPool;
+    }
+
+    bool operator!=(QueryPool const & rhs ) const
+    {
+      return m_queryPool != rhs.m_queryPool;
+    }
+
+    bool operator<(QueryPool const & rhs ) const
+    {
+      return m_queryPool < rhs.m_queryPool;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const
+    {
+      return m_queryPool;
+    }
+
+    explicit operator bool() const
+    {
+      return m_queryPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_queryPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueryPool m_queryPool;
+  };
+
+  static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+  class Framebuffer
+  {
+  public:
+    Framebuffer()
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    Framebuffer( std::nullptr_t )
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer )
+       : m_framebuffer( framebuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Framebuffer & operator=(VkFramebuffer framebuffer)
+    {
+      m_framebuffer = framebuffer;
+      return *this; 
+    }
+#endif
+
+    Framebuffer & operator=( std::nullptr_t )
+    {
+      m_framebuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Framebuffer const & rhs ) const
+    {
+      return m_framebuffer == rhs.m_framebuffer;
+    }
+
+    bool operator!=(Framebuffer const & rhs ) const
+    {
+      return m_framebuffer != rhs.m_framebuffer;
+    }
+
+    bool operator<(Framebuffer const & rhs ) const
+    {
+      return m_framebuffer < rhs.m_framebuffer;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const
+    {
+      return m_framebuffer;
+    }
+
+    explicit operator bool() const
+    {
+      return m_framebuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_framebuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFramebuffer m_framebuffer;
+  };
+
+  static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+
+  class RenderPass
+  {
+  public:
+    RenderPass()
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    RenderPass( std::nullptr_t )
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass )
+       : m_renderPass( renderPass )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    RenderPass & operator=(VkRenderPass renderPass)
+    {
+      m_renderPass = renderPass;
+      return *this; 
+    }
+#endif
+
+    RenderPass & operator=( std::nullptr_t )
+    {
+      m_renderPass = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( RenderPass const & rhs ) const
+    {
+      return m_renderPass == rhs.m_renderPass;
+    }
+
+    bool operator!=(RenderPass const & rhs ) const
+    {
+      return m_renderPass != rhs.m_renderPass;
+    }
+
+    bool operator<(RenderPass const & rhs ) const
+    {
+      return m_renderPass < rhs.m_renderPass;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const
+    {
+      return m_renderPass;
+    }
+
+    explicit operator bool() const
+    {
+      return m_renderPass != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_renderPass == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkRenderPass m_renderPass;
+  };
+
+  static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+
+  class PipelineCache
+  {
+  public:
+    PipelineCache()
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    PipelineCache( std::nullptr_t )
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache )
+       : m_pipelineCache( pipelineCache )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineCache & operator=(VkPipelineCache pipelineCache)
+    {
+      m_pipelineCache = pipelineCache;
+      return *this; 
+    }
+#endif
+
+    PipelineCache & operator=( std::nullptr_t )
+    {
+      m_pipelineCache = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PipelineCache const & rhs ) const
+    {
+      return m_pipelineCache == rhs.m_pipelineCache;
+    }
+
+    bool operator!=(PipelineCache const & rhs ) const
+    {
+      return m_pipelineCache != rhs.m_pipelineCache;
+    }
+
+    bool operator<(PipelineCache const & rhs ) const
+    {
+      return m_pipelineCache < rhs.m_pipelineCache;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const
+    {
+      return m_pipelineCache;
+    }
+
+    explicit operator bool() const
+    {
+      return m_pipelineCache != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_pipelineCache == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineCache m_pipelineCache;
+  };
+
+  static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+
+  class ObjectTableNVX
+  {
+  public:
+    ObjectTableNVX()
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+    ObjectTableNVX( std::nullptr_t )
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX )
+       : m_objectTableNVX( objectTableNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX)
+    {
+      m_objectTableNVX = objectTableNVX;
+      return *this; 
+    }
+#endif
+
+    ObjectTableNVX & operator=( std::nullptr_t )
+    {
+      m_objectTableNVX = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ObjectTableNVX const & rhs ) const
+    {
+      return m_objectTableNVX == rhs.m_objectTableNVX;
+    }
+
+    bool operator!=(ObjectTableNVX const & rhs ) const
+    {
+      return m_objectTableNVX != rhs.m_objectTableNVX;
+    }
+
+    bool operator<(ObjectTableNVX const & rhs ) const
+    {
+      return m_objectTableNVX < rhs.m_objectTableNVX;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const
+    {
+      return m_objectTableNVX;
+    }
+
+    explicit operator bool() const
+    {
+      return m_objectTableNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_objectTableNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkObjectTableNVX m_objectTableNVX;
+  };
+
+  static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+
+  class IndirectCommandsLayoutNVX
+  {
+  public:
+    IndirectCommandsLayoutNVX()
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+    IndirectCommandsLayoutNVX( std::nullptr_t )
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX )
+       : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
+    {
+      m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
+      return *this; 
+    }
+#endif
+
+    IndirectCommandsLayoutNVX & operator=( std::nullptr_t )
+    {
+      m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( IndirectCommandsLayoutNVX const & rhs ) const
+    {
+      return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const
+    {
+      return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator<(IndirectCommandsLayoutNVX const & rhs ) const
+    {
+      return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const
+    {
+      return m_indirectCommandsLayoutNVX;
+    }
+
+    explicit operator bool() const
+    {
+      return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
+  };
+
+  static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
+
+  class DescriptorUpdateTemplateKHR
+  {
+  public:
+    DescriptorUpdateTemplateKHR()
+      : m_descriptorUpdateTemplateKHR(VK_NULL_HANDLE)
+    {}
+
+    DescriptorUpdateTemplateKHR( std::nullptr_t )
+      : m_descriptorUpdateTemplateKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplateKHR( VkDescriptorUpdateTemplateKHR descriptorUpdateTemplateKHR )
+       : m_descriptorUpdateTemplateKHR( descriptorUpdateTemplateKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorUpdateTemplateKHR & operator=(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplateKHR)
+    {
+      m_descriptorUpdateTemplateKHR = descriptorUpdateTemplateKHR;
+      return *this; 
+    }
+#endif
+
+    DescriptorUpdateTemplateKHR & operator=( std::nullptr_t )
+    {
+      m_descriptorUpdateTemplateKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorUpdateTemplateKHR const & rhs ) const
+    {
+      return m_descriptorUpdateTemplateKHR == rhs.m_descriptorUpdateTemplateKHR;
+    }
+
+    bool operator!=(DescriptorUpdateTemplateKHR const & rhs ) const
+    {
+      return m_descriptorUpdateTemplateKHR != rhs.m_descriptorUpdateTemplateKHR;
+    }
+
+    bool operator<(DescriptorUpdateTemplateKHR const & rhs ) const
+    {
+      return m_descriptorUpdateTemplateKHR < rhs.m_descriptorUpdateTemplateKHR;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplateKHR() const
+    {
+      return m_descriptorUpdateTemplateKHR;
+    }
+
+    explicit operator bool() const
+    {
+      return m_descriptorUpdateTemplateKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_descriptorUpdateTemplateKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorUpdateTemplateKHR m_descriptorUpdateTemplateKHR;
+  };
+
+  static_assert( sizeof( DescriptorUpdateTemplateKHR ) == sizeof( VkDescriptorUpdateTemplateKHR ), "handle and wrapper have different size!" );
+
+  class DisplayKHR
+  {
+  public:
+    DisplayKHR()
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    DisplayKHR( std::nullptr_t )
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR )
+       : m_displayKHR( displayKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayKHR & operator=(VkDisplayKHR displayKHR)
+    {
+      m_displayKHR = displayKHR;
+      return *this; 
+    }
+#endif
+
+    DisplayKHR & operator=( std::nullptr_t )
+    {
+      m_displayKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DisplayKHR const & rhs ) const
+    {
+      return m_displayKHR == rhs.m_displayKHR;
+    }
+
+    bool operator!=(DisplayKHR const & rhs ) const
+    {
+      return m_displayKHR != rhs.m_displayKHR;
+    }
+
+    bool operator<(DisplayKHR const & rhs ) const
+    {
+      return m_displayKHR < rhs.m_displayKHR;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const
+    {
+      return m_displayKHR;
+    }
+
+    explicit operator bool() const
+    {
+      return m_displayKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_displayKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayKHR m_displayKHR;
+  };
+
+  static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+
+  class DisplayModeKHR
+  {
+  public:
+    DisplayModeKHR()
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    DisplayModeKHR( std::nullptr_t )
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR )
+       : m_displayModeKHR( displayModeKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR)
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this; 
+    }
+#endif
+
+    DisplayModeKHR & operator=( std::nullptr_t )
+    {
+      m_displayModeKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DisplayModeKHR const & rhs ) const
+    {
+      return m_displayModeKHR == rhs.m_displayModeKHR;
+    }
+
+    bool operator!=(DisplayModeKHR const & rhs ) const
+    {
+      return m_displayModeKHR != rhs.m_displayModeKHR;
+    }
+
+    bool operator<(DisplayModeKHR const & rhs ) const
+    {
+      return m_displayModeKHR < rhs.m_displayModeKHR;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const
+    {
+      return m_displayModeKHR;
+    }
+
+    explicit operator bool() const
+    {
+      return m_displayModeKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_displayModeKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayModeKHR m_displayModeKHR;
+  };
+
+  static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+
+  class SurfaceKHR
+  {
+  public:
+    SurfaceKHR()
+      : m_surfaceKHR(VK_NULL_HANDLE)
+    {}
+
+    SurfaceKHR( std::nullptr_t )
+      : m_surfaceKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR )
+       : m_surfaceKHR( surfaceKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR)
+    {
+      m_surfaceKHR = surfaceKHR;
+      return *this; 
+    }
+#endif
+
+    SurfaceKHR & operator=( std::nullptr_t )
+    {
+      m_surfaceKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( SurfaceKHR const & rhs ) const
+    {
+      return m_surfaceKHR == rhs.m_surfaceKHR;
+    }
+
+    bool operator!=(SurfaceKHR const & rhs ) const
+    {
+      return m_surfaceKHR != rhs.m_surfaceKHR;
+    }
+
+    bool operator<(SurfaceKHR const & rhs ) const
+    {
+      return m_surfaceKHR < rhs.m_surfaceKHR;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const
+    {
+      return m_surfaceKHR;
+    }
+
+    explicit operator bool() const
+    {
+      return m_surfaceKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR;
+  };
+
+  static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+
+  class SwapchainKHR
+  {
+  public:
+    SwapchainKHR()
+      : m_swapchainKHR(VK_NULL_HANDLE)
+    {}
+
+    SwapchainKHR( std::nullptr_t )
+      : m_swapchainKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR )
+       : m_swapchainKHR( swapchainKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR)
+    {
+      m_swapchainKHR = swapchainKHR;
+      return *this; 
+    }
+#endif
+
+    SwapchainKHR & operator=( std::nullptr_t )
+    {
+      m_swapchainKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( SwapchainKHR const & rhs ) const
+    {
+      return m_swapchainKHR == rhs.m_swapchainKHR;
+    }
+
+    bool operator!=(SwapchainKHR const & rhs ) const
+    {
+      return m_swapchainKHR != rhs.m_swapchainKHR;
+    }
+
+    bool operator<(SwapchainKHR const & rhs ) const
+    {
+      return m_swapchainKHR < rhs.m_swapchainKHR;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const
+    {
+      return m_swapchainKHR;
+    }
+
+    explicit operator bool() const
+    {
+      return m_swapchainKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_swapchainKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSwapchainKHR m_swapchainKHR;
+  };
+
+  static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+
+  class DebugReportCallbackEXT
+  {
+  public:
+    DebugReportCallbackEXT()
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    DebugReportCallbackEXT( std::nullptr_t )
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT )
+       : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT)
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this; 
+    }
+#endif
+
+    DebugReportCallbackEXT & operator=( std::nullptr_t )
+    {
+      m_debugReportCallbackEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DebugReportCallbackEXT const & rhs ) const
+    {
+      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator!=(DebugReportCallbackEXT const & rhs ) const
+    {
+      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator<(DebugReportCallbackEXT const & rhs ) const
+    {
+      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+    }
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const
+    {
+      return m_debugReportCallbackEXT;
+    }
+
+    explicit operator bool() const
+    {
+      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugReportCallbackEXT m_debugReportCallbackEXT;
+  };
+
+  static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+
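+  // From here on, struct wrappers mirror the Vk* C structs field-for-field. Each one is
+  // layout-compatible with its C counterpart (enforced by the static_asserts), offers
+  // chainable set*() methods, and converts implicitly to a const reference of the C type,
+  // e.g. Rect2D().setOffset( { 0, 0 } ).setExtent( { 640, 480 } ) can be passed wherever
+  // a VkRect2D is expected.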
+  struct Offset2D
+  {
+    Offset2D( int32_t x_ = 0, int32_t y_ = 0 )
+      : x( x_ )
+      , y( y_ )
+    {
+    }
+
+    Offset2D( VkOffset2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset2D ) );
+    }
+
+    Offset2D& operator=( VkOffset2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset2D ) );
+      return *this;
+    }
+    Offset2D& setX( int32_t x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset2D& setY( int32_t y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator const VkOffset2D&() const
+    {
+      return *reinterpret_cast<const VkOffset2D*>(this);
+    }
+
+    bool operator==( Offset2D const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( Offset2D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    int32_t x;
+    int32_t y;
+  };
+  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+
+  struct Offset3D
+  {
+    Offset3D( int32_t x_ = 0, int32_t y_ = 0, int32_t z_ = 0 )
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {
+    }
+
+    Offset3D( VkOffset3D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset3D ) );
+    }
+
+    Offset3D& operator=( VkOffset3D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Offset3D ) );
+      return *this;
+    }
+    Offset3D& setX( int32_t x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset3D& setY( int32_t y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    Offset3D& setZ( int32_t z_ )
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator const VkOffset3D&() const
+    {
+      return *reinterpret_cast<const VkOffset3D*>(this);
+    }
+
+    bool operator==( Offset3D const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( Offset3D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    int32_t x;
+    int32_t y;
+    int32_t z;
+  };
+  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+
+  struct Extent2D
+  {
+    Extent2D( uint32_t width_ = 0, uint32_t height_ = 0 )
+      : width( width_ )
+      , height( height_ )
+    {
+    }
+
+    Extent2D( VkExtent2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Extent2D ) );
+    }
+
+    Extent2D& operator=( VkExtent2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Extent2D ) );
+      return *this;
+    }
+    Extent2D& setWidth( uint32_t width_ )
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent2D& setHeight( uint32_t height_ )
+    {
+      height = height_;
+      return *this;
+    }
+
+    operator const VkExtent2D&() const
+    {
+      return *reinterpret_cast<const VkExtent2D*>(this);
+    }
+
+    bool operator==( Extent2D const& rhs ) const
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height );
+    }
+
+    bool operator!=( Extent2D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t width;
+    uint32_t height;
+  };
+  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+
+  struct Extent3D
+  {
+    Extent3D( uint32_t width_ = 0, uint32_t height_ = 0, uint32_t depth_ = 0 )
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {
+    }
+
+    Extent3D( VkExtent3D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Extent3D ) );
+    }
+
+    Extent3D& operator=( VkExtent3D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Extent3D ) );
+      return *this;
+    }
+    Extent3D& setWidth( uint32_t width_ )
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent3D& setHeight( uint32_t height_ )
+    {
+      height = height_;
+      return *this;
+    }
+
+    Extent3D& setDepth( uint32_t depth_ )
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    operator const VkExtent3D&() const
+    {
+      return *reinterpret_cast<const VkExtent3D*>(this);
+    }
+
+    bool operator==( Extent3D const& rhs ) const
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+    }
+
+    bool operator!=( Extent3D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t width;
+    uint32_t height;
+    uint32_t depth;
+  };
+  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+
+  struct Viewport
+  {
+    Viewport( float x_ = 0, float y_ = 0, float width_ = 0, float height_ = 0, float minDepth_ = 0, float maxDepth_ = 0 )
+      : x( x_ )
+      , y( y_ )
+      , width( width_ )
+      , height( height_ )
+      , minDepth( minDepth_ )
+      , maxDepth( maxDepth_ )
+    {
+    }
+
+    Viewport( VkViewport const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Viewport ) );
+    }
+
+    Viewport& operator=( VkViewport const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Viewport ) );
+      return *this;
+    }
+    Viewport& setX( float x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    Viewport& setY( float y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    Viewport& setWidth( float width_ )
+    {
+      width = width_;
+      return *this;
+    }
+
+    Viewport& setHeight( float height_ )
+    {
+      height = height_;
+      return *this;
+    }
+
+    Viewport& setMinDepth( float minDepth_ )
+    {
+      minDepth = minDepth_;
+      return *this;
+    }
+
+    Viewport& setMaxDepth( float maxDepth_ )
+    {
+      maxDepth = maxDepth_;
+      return *this;
+    }
+
+    operator const VkViewport&() const
+    {
+      return *reinterpret_cast<const VkViewport*>(this);
+    }
+
+    bool operator==( Viewport const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( minDepth == rhs.minDepth )
+          && ( maxDepth == rhs.maxDepth );
+    }
+
+    bool operator!=( Viewport const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    float x;
+    float y;
+    float width;
+    float height;
+    float minDepth;
+    float maxDepth;
+  };
+  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+
+  struct Rect2D
+  {
+    Rect2D( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D() )
+      : offset( offset_ )
+      , extent( extent_ )
+    {
+    }
+
+    Rect2D( VkRect2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Rect2D ) );
+    }
+
+    Rect2D& operator=( VkRect2D const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Rect2D ) );
+      return *this;
+    }
+    Rect2D& setOffset( Offset2D offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    Rect2D& setExtent( Extent2D extent_ )
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator const VkRect2D&() const
+    {
+      return *reinterpret_cast<const VkRect2D*>(this);
+    }
+
+    bool operator==( Rect2D const& rhs ) const
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( Rect2D const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Offset2D offset;
+    Extent2D extent;
+  };
+  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+
+  struct ClearRect
+  {
+    ClearRect( Rect2D rect_ = Rect2D(), uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
+      : rect( rect_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {
+    }
+
+    ClearRect( VkClearRect const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearRect ) );
+    }
+
+    ClearRect& operator=( VkClearRect const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearRect ) );
+      return *this;
+    }
+    ClearRect& setRect( Rect2D rect_ )
+    {
+      rect = rect_;
+      return *this;
+    }
+
+    ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ )
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ClearRect& setLayerCount( uint32_t layerCount_ )
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator const VkClearRect&() const
+    {
+      return *reinterpret_cast<const VkClearRect*>(this);
+    }
+
+    bool operator==( ClearRect const& rhs ) const
+    {
+      return ( rect == rhs.rect )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ClearRect const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Rect2D rect;
+    uint32_t baseArrayLayer;
+    uint32_t layerCount;
+  };
+  static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+
+  struct ExtensionProperties
+  {
+    operator const VkExtensionProperties&() const
+    {
+      return *reinterpret_cast<const VkExtensionProperties*>(this);
+    }
+
+    bool operator==( ExtensionProperties const& rhs ) const
+    {
+      return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( specVersion == rhs.specVersion );
+    }
+
+    bool operator!=( ExtensionProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t specVersion;
+  };
+  static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+
+  struct LayerProperties
+  {
+    operator const VkLayerProperties&() const
+    {
+      return *reinterpret_cast<const VkLayerProperties*>(this);
+    }
+
+    bool operator==( LayerProperties const& rhs ) const
+    {
+      return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( specVersion == rhs.specVersion )
+          && ( implementationVersion == rhs.implementationVersion )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+    }
+
+    bool operator!=( LayerProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    char layerName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t specVersion;
+    uint32_t implementationVersion;
+    char description[VK_MAX_DESCRIPTION_SIZE];
+  };
+  static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+
+  struct AllocationCallbacks
+  {
+    AllocationCallbacks( void* pUserData_ = nullptr, PFN_vkAllocationFunction pfnAllocation_ = nullptr, PFN_vkReallocationFunction pfnReallocation_ = nullptr, PFN_vkFreeFunction pfnFree_ = nullptr, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr )
+      : pUserData( pUserData_ )
+      , pfnAllocation( pfnAllocation_ )
+      , pfnReallocation( pfnReallocation_ )
+      , pfnFree( pfnFree_ )
+      , pfnInternalAllocation( pfnInternalAllocation_ )
+      , pfnInternalFree( pfnInternalFree_ )
+    {
+    }
+
+    AllocationCallbacks( VkAllocationCallbacks const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AllocationCallbacks ) );
+    }
+
+    AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AllocationCallbacks ) );
+      return *this;
+    }
+    AllocationCallbacks& setPUserData( void* pUserData_ )
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ )
+    {
+      pfnAllocation = pfnAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ )
+    {
+      pfnReallocation = pfnReallocation_;
+      return *this;
+    }
+
+    AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ )
+    {
+      pfnFree = pfnFree_;
+      return *this;
+    }
+
+    AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ )
+    {
+      pfnInternalAllocation = pfnInternalAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ )
+    {
+      pfnInternalFree = pfnInternalFree_;
+      return *this;
+    }
+
+    operator const VkAllocationCallbacks&() const
+    {
+      return *reinterpret_cast<const VkAllocationCallbacks*>(this);
+    }
+
+    bool operator==( AllocationCallbacks const& rhs ) const
+    {
+      return ( pUserData == rhs.pUserData )
+          && ( pfnAllocation == rhs.pfnAllocation )
+          && ( pfnReallocation == rhs.pfnReallocation )
+          && ( pfnFree == rhs.pfnFree )
+          && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
+          && ( pfnInternalFree == rhs.pfnInternalFree );
+    }
+
+    bool operator!=( AllocationCallbacks const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    void* pUserData;
+    PFN_vkAllocationFunction pfnAllocation;
+    PFN_vkReallocationFunction pfnReallocation;
+    PFN_vkFreeFunction pfnFree;
+    PFN_vkInternalAllocationNotification pfnInternalAllocation;
+    PFN_vkInternalFreeNotification pfnInternalFree;
+  };
+  static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+
+  struct MemoryRequirements
+  {
+    operator const VkMemoryRequirements&() const
+    {
+      return *reinterpret_cast<const VkMemoryRequirements*>(this);
+    }
+
+    bool operator==( MemoryRequirements const& rhs ) const
+    {
+      return ( size == rhs.size )
+          && ( alignment == rhs.alignment )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryRequirements const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DeviceSize size;
+    DeviceSize alignment;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+
+  struct DescriptorBufferInfo
+  {
+    DescriptorBufferInfo( Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize range_ = 0 )
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , range( range_ )
+    {
+    }
+
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) );
+    }
+
+    DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) );
+      return *this;
+    }
+    DescriptorBufferInfo& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    DescriptorBufferInfo& setOffset( DeviceSize offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorBufferInfo& setRange( DeviceSize range_ )
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator const VkDescriptorBufferInfo&() const
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo*>(this);
+    }
+
+    bool operator==( DescriptorBufferInfo const& rhs ) const
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( DescriptorBufferInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Buffer buffer;
+    DeviceSize offset;
+    DeviceSize range;
+  };
+  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+
+  struct SubresourceLayout
+  {
+    operator const VkSubresourceLayout&() const
+    {
+      return *reinterpret_cast<const VkSubresourceLayout*>(this);
+    }
+
+    bool operator==( SubresourceLayout const& rhs ) const
+    {
+      return ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( rowPitch == rhs.rowPitch )
+          && ( arrayPitch == rhs.arrayPitch )
+          && ( depthPitch == rhs.depthPitch );
+    }
+
+    bool operator!=( SubresourceLayout const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DeviceSize offset;
+    DeviceSize size;
+    DeviceSize rowPitch;
+    DeviceSize arrayPitch;
+    DeviceSize depthPitch;
+  };
+  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+
+  struct BufferCopy
+  {
+    BufferCopy( DeviceSize srcOffset_ = 0, DeviceSize dstOffset_ = 0, DeviceSize size_ = 0 )
+      : srcOffset( srcOffset_ )
+      , dstOffset( dstOffset_ )
+      , size( size_ )
+    {
+    }
+
+    BufferCopy( VkBufferCopy const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferCopy ) );
+    }
+
+    BufferCopy& operator=( VkBufferCopy const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferCopy ) );
+      return *this;
+    }
+    BufferCopy& setSrcOffset( DeviceSize srcOffset_ )
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    BufferCopy& setDstOffset( DeviceSize dstOffset_ )
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    BufferCopy& setSize( DeviceSize size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator const VkBufferCopy&() const
+    {
+      return *reinterpret_cast<const VkBufferCopy*>(this);
+    }
+
+    bool operator==( BufferCopy const& rhs ) const
+    {
+      return ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferCopy const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DeviceSize srcOffset;
+    DeviceSize dstOffset;
+    DeviceSize size;
+  };
+  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+
+  struct SpecializationMapEntry
+  {
+    SpecializationMapEntry( uint32_t constantID_ = 0, uint32_t offset_ = 0, size_t size_ = 0 )
+      : constantID( constantID_ )
+      , offset( offset_ )
+      , size( size_ )
+    {
+    }
+
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SpecializationMapEntry ) );
+    }
+
+    SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SpecializationMapEntry ) );
+      return *this;
+    }
+    SpecializationMapEntry& setConstantID( uint32_t constantID_ )
+    {
+      constantID = constantID_;
+      return *this;
+    }
+
+    SpecializationMapEntry& setOffset( uint32_t offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SpecializationMapEntry& setSize( size_t size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator const VkSpecializationMapEntry&() const
+    {
+      return *reinterpret_cast<const VkSpecializationMapEntry*>(this);
+    }
+
+    bool operator==( SpecializationMapEntry const& rhs ) const
+    {
+      return ( constantID == rhs.constantID )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( SpecializationMapEntry const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t constantID;
+    uint32_t offset;
+    size_t size;
+  };
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+
+  struct SpecializationInfo
+  {
+    SpecializationInfo( uint32_t mapEntryCount_ = 0, const SpecializationMapEntry* pMapEntries_ = nullptr, size_t dataSize_ = 0, const void* pData_ = nullptr )
+      : mapEntryCount( mapEntryCount_ )
+      , pMapEntries( pMapEntries_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {
+    }
+
+    SpecializationInfo( VkSpecializationInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SpecializationInfo ) );
+    }
+
+    SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SpecializationInfo ) );
+      return *this;
+    }
+    SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
+    {
+      mapEntryCount = mapEntryCount_;
+      return *this;
+    }
+
+    SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
+    {
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+    SpecializationInfo& setDataSize( size_t dataSize_ )
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    SpecializationInfo& setPData( const void* pData_ )
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator const VkSpecializationInfo&() const
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>(this);
+    }
+
+    bool operator==( SpecializationInfo const& rhs ) const
+    {
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( SpecializationInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t mapEntryCount;
+    const SpecializationMapEntry* pMapEntries;
+    size_t dataSize;
+    const void* pData;
+  };
+  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+
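+  // ClearColorValue and ClearValue wrap the corresponding Vulkan unions. ClearValue only
+  // stores the C++ wrapper types when unrestricted unions are available; otherwise it
+  // falls back to the raw VkClearColorValue / VkClearDepthStencilValue members (see the
+  // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS check below).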
+  union ClearColorValue
+  {
+    ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
+    {
+      memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
+    }
+
+    ClearColorValue( const std::array<int32_t,4>& int32_ )
+    {
+      memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
+    }
+
+    ClearColorValue( const std::array<uint32_t,4>& uint32_ )
+    {
+      memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+    }
+
+    ClearColorValue& setFloat32( std::array<float,4> float32_ )
+    {
+      memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
+    {
+      memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
+      return *this;
+    }
+
+    ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
+    {
+      memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+      return *this;
+    }
+
+    operator VkClearColorValue const& () const
+    {
+      return *reinterpret_cast<const VkClearColorValue*>(this);
+    }
+
+    float float32[4];
+    int32_t int32[4];
+    uint32_t uint32[4];
+  };
+
+  struct ClearDepthStencilValue
+  {
+    ClearDepthStencilValue( float depth_ = 0, uint32_t stencil_ = 0 )
+      : depth( depth_ )
+      , stencil( stencil_ )
+    {
+    }
+
+    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
+    }
+
+    ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) );
+      return *this;
+    }
+    ClearDepthStencilValue& setDepth( float depth_ )
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    ClearDepthStencilValue& setStencil( uint32_t stencil_ )
+    {
+      stencil = stencil_;
+      return *this;
+    }
+
+    operator const VkClearDepthStencilValue&() const
+    {
+      return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
+    }
+
+    bool operator==( ClearDepthStencilValue const& rhs ) const
+    {
+      return ( depth == rhs.depth )
+          && ( stencil == rhs.stencil );
+    }
+
+    bool operator!=( ClearDepthStencilValue const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    float depth;
+    uint32_t stencil;
+  };
+  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+
+  union ClearValue
+  {
+    ClearValue( ClearColorValue color_ = ClearColorValue() )
+    {
+      color = color_;
+    }
+
+    ClearValue( ClearDepthStencilValue depthStencil_ )
+    {
+      depthStencil = depthStencil_;
+    }
+
+    ClearValue& setColor( ClearColorValue color_ )
+    {
+      color = color_;
+      return *this;
+    }
+
+    ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ )
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+
+    operator VkClearValue const& () const
+    {
+      return *reinterpret_cast<const VkClearValue*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    ClearColorValue color;
+    ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue color;
+    VkClearDepthStencilValue depthStencil;
+#endif  // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+  };
+
+  struct PhysicalDeviceFeatures
+  {
+    PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, Bool32 fullDrawIndexUint32_ = 0, Bool32 imageCubeArray_ = 0, Bool32 independentBlend_ = 0, Bool32 geometryShader_ = 0, Bool32 tessellationShader_ = 0, Bool32 sampleRateShading_ = 0, Bool32 dualSrcBlend_ = 0, Bool32 logicOp_ = 0, Bool32 multiDrawIndirect_ = 0, Bool32 drawIndirectFirstInstance_ = 0, Bool32 depthClamp_ = 0, Bool32 depthBiasClamp_ = 0, Bool32 fillModeNonSolid_ = 0, Bool32 depthBounds_ = 0, Bool32 wideLines_ = 0, Bool32 largePoints_ = 0, Bool32 alphaToOne_ = 0, Bool32 multiViewport_ = 0, Bool32 samplerAnisotropy_ = 0, Bool32 textureCompressionETC2_ = 0, Bool32 textureCompressionASTC_LDR_ = 0, Bool32 textureCompressionBC_ = 0, Bool32 occlusionQueryPrecise_ = 0, Bool32 pipelineStatisticsQuery_ = 0, Bool32 vertexPipelineStoresAndAtomics_ = 0, Bool32 fragmentStoresAndAtomics_ = 0, Bool32 shaderTessellationAndGeometryPointSize_ = 0, Bool32 shaderImageGatherExtended_ = 0, Bool32 shaderStorageImageExtendedFormats_ = 0, Bool32 shaderStorageImageMultisample_ = 0, Bool32 shaderStorageImageReadWithoutFormat_ = 0, Bool32 shaderStorageImageWriteWithoutFormat_ = 0, Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, Bool32 shaderClipDistance_ = 0, Bool32 shaderCullDistance_ = 0, Bool32 shaderFloat64_ = 0, Bool32 shaderInt64_ = 0, Bool32 shaderInt16_ = 0, Bool32 shaderResourceResidency_ = 0, Bool32 shaderResourceMinLod_ = 0, Bool32 sparseBinding_ = 0, Bool32 sparseResidencyBuffer_ = 0, Bool32 sparseResidencyImage2D_ = 0, Bool32 sparseResidencyImage3D_ = 0, Bool32 sparseResidency2Samples_ = 0, Bool32 sparseResidency4Samples_ = 0, Bool32 sparseResidency8Samples_ = 0, Bool32 sparseResidency16Samples_ = 0, Bool32 sparseResidencyAliased_ = 0, Bool32 variableMultisampleRate_ = 0, Bool32 inheritedQueries_ = 0 )
+      : robustBufferAccess( robustBufferAccess_ )
+      , fullDrawIndexUint32( fullDrawIndexUint32_ )
+      , imageCubeArray( imageCubeArray_ )
+      , independentBlend( independentBlend_ )
+      , geometryShader( geometryShader_ )
+      , tessellationShader( tessellationShader_ )
+      , sampleRateShading( sampleRateShading_ )
+      , dualSrcBlend( dualSrcBlend_ )
+      , logicOp( logicOp_ )
+      , multiDrawIndirect( multiDrawIndirect_ )
+      , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
+      , depthClamp( depthClamp_ )
+      , depthBiasClamp( depthBiasClamp_ )
+      , fillModeNonSolid( fillModeNonSolid_ )
+      , depthBounds( depthBounds_ )
+      , wideLines( wideLines_ )
+      , largePoints( largePoints_ )
+      , alphaToOne( alphaToOne_ )
+      , multiViewport( multiViewport_ )
+      , samplerAnisotropy( samplerAnisotropy_ )
+      , textureCompressionETC2( textureCompressionETC2_ )
+      , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
+      , textureCompressionBC( textureCompressionBC_ )
+      , occlusionQueryPrecise( occlusionQueryPrecise_ )
+      , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
+      , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
+      , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
+      , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
+      , shaderImageGatherExtended( shaderImageGatherExtended_ )
+      , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
+      , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
+      , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
+      , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
+      , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
+      , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
+      , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
+      , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
+      , shaderClipDistance( shaderClipDistance_ )
+      , shaderCullDistance( shaderCullDistance_ )
+      , shaderFloat64( shaderFloat64_ )
+      , shaderInt64( shaderInt64_ )
+      , shaderInt16( shaderInt16_ )
+      , shaderResourceResidency( shaderResourceResidency_ )
+      , shaderResourceMinLod( shaderResourceMinLod_ )
+      , sparseBinding( sparseBinding_ )
+      , sparseResidencyBuffer( sparseResidencyBuffer_ )
+      , sparseResidencyImage2D( sparseResidencyImage2D_ )
+      , sparseResidencyImage3D( sparseResidencyImage3D_ )
+      , sparseResidency2Samples( sparseResidency2Samples_ )
+      , sparseResidency4Samples( sparseResidency4Samples_ )
+      , sparseResidency8Samples( sparseResidency8Samples_ )
+      , sparseResidency16Samples( sparseResidency16Samples_ )
+      , sparseResidencyAliased( sparseResidencyAliased_ )
+      , variableMultisampleRate( variableMultisampleRate_ )
+      , inheritedQueries( inheritedQueries_ )
+    {
+    }
+
+    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) );
+    }
+
+    PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) );
+      return *this;
+    }
+    PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ )
+    {
+      robustBufferAccess = robustBufferAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ )
+    {
+      fullDrawIndexUint32 = fullDrawIndexUint32_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ )
+    {
+      imageCubeArray = imageCubeArray_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ )
+    {
+      independentBlend = independentBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ )
+    {
+      geometryShader = geometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ )
+    {
+      tessellationShader = tessellationShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ )
+    {
+      sampleRateShading = sampleRateShading_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ )
+    {
+      dualSrcBlend = dualSrcBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ )
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ )
+    {
+      multiDrawIndirect = multiDrawIndirect_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ )
+    {
+      drawIndirectFirstInstance = drawIndirectFirstInstance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ )
+    {
+      depthClamp = depthClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ )
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ )
+    {
+      fillModeNonSolid = fillModeNonSolid_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ )
+    {
+      depthBounds = depthBounds_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ )
+    {
+      wideLines = wideLines_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ )
+    {
+      largePoints = largePoints_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ )
+    {
+      alphaToOne = alphaToOne_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ )
+    {
+      multiViewport = multiViewport_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ )
+    {
+      samplerAnisotropy = samplerAnisotropy_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ )
+    {
+      textureCompressionETC2 = textureCompressionETC2_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ )
+    {
+      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ )
+    {
+      textureCompressionBC = textureCompressionBC_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ )
+    {
+      occlusionQueryPrecise = occlusionQueryPrecise_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ )
+    {
+      pipelineStatisticsQuery = pipelineStatisticsQuery_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ )
+    {
+      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ )
+    {
+      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ )
+    {
+      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ )
+    {
+      shaderImageGatherExtended = shaderImageGatherExtended_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ )
+    {
+      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ )
+    {
+      shaderStorageImageMultisample = shaderStorageImageMultisample_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ )
+    {
+      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ )
+    {
+      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ )
+    {
+      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ )
+    {
+      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ )
+    {
+      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ )
+    {
+      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ )
+    {
+      shaderClipDistance = shaderClipDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ )
+    {
+      shaderCullDistance = shaderCullDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ )
+    {
+      shaderFloat64 = shaderFloat64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ )
+    {
+      shaderInt64 = shaderInt64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ )
+    {
+      shaderInt16 = shaderInt16_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ )
+    {
+      shaderResourceResidency = shaderResourceResidency_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ )
+    {
+      shaderResourceMinLod = shaderResourceMinLod_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ )
+    {
+      sparseBinding = sparseBinding_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ )
+    {
+      sparseResidencyBuffer = sparseResidencyBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ )
+    {
+      sparseResidencyImage2D = sparseResidencyImage2D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ )
+    {
+      sparseResidencyImage3D = sparseResidencyImage3D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ )
+    {
+      sparseResidency2Samples = sparseResidency2Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ )
+    {
+      sparseResidency4Samples = sparseResidency4Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ )
+    {
+      sparseResidency8Samples = sparseResidency8Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ )
+    {
+      sparseResidency16Samples = sparseResidency16Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ )
+    {
+      sparseResidencyAliased = sparseResidencyAliased_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ )
+    {
+      variableMultisampleRate = variableMultisampleRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ )
+    {
+      inheritedQueries = inheritedQueries_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceFeatures&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>(this);
+    }
+
+    bool operator==( PhysicalDeviceFeatures const& rhs ) const
+    {
+      return ( robustBufferAccess == rhs.robustBufferAccess )
+          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+          && ( imageCubeArray == rhs.imageCubeArray )
+          && ( independentBlend == rhs.independentBlend )
+          && ( geometryShader == rhs.geometryShader )
+          && ( tessellationShader == rhs.tessellationShader )
+          && ( sampleRateShading == rhs.sampleRateShading )
+          && ( dualSrcBlend == rhs.dualSrcBlend )
+          && ( logicOp == rhs.logicOp )
+          && ( multiDrawIndirect == rhs.multiDrawIndirect )
+          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+          && ( depthClamp == rhs.depthClamp )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( fillModeNonSolid == rhs.fillModeNonSolid )
+          && ( depthBounds == rhs.depthBounds )
+          && ( wideLines == rhs.wideLines )
+          && ( largePoints == rhs.largePoints )
+          && ( alphaToOne == rhs.alphaToOne )
+          && ( multiViewport == rhs.multiViewport )
+          && ( samplerAnisotropy == rhs.samplerAnisotropy )
+          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+          && ( textureCompressionBC == rhs.textureCompressionBC )
+          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
+          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
+          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
+          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
+          && ( shaderClipDistance == rhs.shaderClipDistance )
+          && ( shaderCullDistance == rhs.shaderCullDistance )
+          && ( shaderFloat64 == rhs.shaderFloat64 )
+          && ( shaderInt64 == rhs.shaderInt64 )
+          && ( shaderInt16 == rhs.shaderInt16 )
+          && ( shaderResourceResidency == rhs.shaderResourceResidency )
+          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
+          && ( sparseBinding == rhs.sparseBinding )
+          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
+          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
+          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
+          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
+          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
+          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
+          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
+          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
+          && ( variableMultisampleRate == rhs.variableMultisampleRate )
+          && ( inheritedQueries == rhs.inheritedQueries );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Bool32 robustBufferAccess;
+    Bool32 fullDrawIndexUint32;
+    Bool32 imageCubeArray;
+    Bool32 independentBlend;
+    Bool32 geometryShader;
+    Bool32 tessellationShader;
+    Bool32 sampleRateShading;
+    Bool32 dualSrcBlend;
+    Bool32 logicOp;
+    Bool32 multiDrawIndirect;
+    Bool32 drawIndirectFirstInstance;
+    Bool32 depthClamp;
+    Bool32 depthBiasClamp;
+    Bool32 fillModeNonSolid;
+    Bool32 depthBounds;
+    Bool32 wideLines;
+    Bool32 largePoints;
+    Bool32 alphaToOne;
+    Bool32 multiViewport;
+    Bool32 samplerAnisotropy;
+    Bool32 textureCompressionETC2;
+    Bool32 textureCompressionASTC_LDR;
+    Bool32 textureCompressionBC;
+    Bool32 occlusionQueryPrecise;
+    Bool32 pipelineStatisticsQuery;
+    Bool32 vertexPipelineStoresAndAtomics;
+    Bool32 fragmentStoresAndAtomics;
+    Bool32 shaderTessellationAndGeometryPointSize;
+    Bool32 shaderImageGatherExtended;
+    Bool32 shaderStorageImageExtendedFormats;
+    Bool32 shaderStorageImageMultisample;
+    Bool32 shaderStorageImageReadWithoutFormat;
+    Bool32 shaderStorageImageWriteWithoutFormat;
+    Bool32 shaderUniformBufferArrayDynamicIndexing;
+    Bool32 shaderSampledImageArrayDynamicIndexing;
+    Bool32 shaderStorageBufferArrayDynamicIndexing;
+    Bool32 shaderStorageImageArrayDynamicIndexing;
+    Bool32 shaderClipDistance;
+    Bool32 shaderCullDistance;
+    Bool32 shaderFloat64;
+    Bool32 shaderInt64;
+    Bool32 shaderInt16;
+    Bool32 shaderResourceResidency;
+    Bool32 shaderResourceMinLod;
+    Bool32 sparseBinding;
+    Bool32 sparseResidencyBuffer;
+    Bool32 sparseResidencyImage2D;
+    Bool32 sparseResidencyImage3D;
+    Bool32 sparseResidency2Samples;
+    Bool32 sparseResidency4Samples;
+    Bool32 sparseResidency8Samples;
+    Bool32 sparseResidency16Samples;
+    Bool32 sparseResidencyAliased;
+    Bool32 variableMultisampleRate;
+    Bool32 inheritedQueries;
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
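+
+  // Usage sketch (illustrative comment, not part of the generated header): a PhysicalDeviceFeatures
+  // value is typically filled from the features reported by PhysicalDevice::getFeatures, enabling
+  // only members the device actually supports before device creation, e.g.
+  //   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
+  //   vk::PhysicalDeviceFeatures enabled;
+  //   enabled.samplerAnisotropy = supported.samplerAnisotropy;
+  //   enabled.geometryShader    = supported.geometryShader;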
+
+  struct PhysicalDeviceSparseProperties
+  {
+    operator const VkPhysicalDeviceSparseProperties&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
+    {
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Bool32 residencyStandard2DBlockShape;
+    Bool32 residencyStandard2DMultisampleBlockShape;
+    Bool32 residencyStandard3DBlockShape;
+    Bool32 residencyAlignedMipSize;
+    Bool32 residencyNonResidentStrict;
+  };
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+
+  struct DrawIndirectCommand
+  {
+    DrawIndirectCommand( uint32_t vertexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstVertex_ = 0, uint32_t firstInstance_ = 0 )
+      : vertexCount( vertexCount_ )
+      , instanceCount( instanceCount_ )
+      , firstVertex( firstVertex_ )
+      , firstInstance( firstInstance_ )
+    {
+    }
+
+    DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
+    }
+
+    DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DrawIndirectCommand ) );
+      return *this;
+    }
+    DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+
+    operator const VkDrawIndirectCommand&() const
+    {
+      return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
+    }
+
+    bool operator==( DrawIndirectCommand const& rhs ) const
+    {
+      return ( vertexCount == rhs.vertexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstVertex == rhs.firstVertex )
+          && ( firstInstance == rhs.firstInstance );
+    }
+
+    bool operator!=( DrawIndirectCommand const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t vertexCount;
+    uint32_t instanceCount;
+    uint32_t firstVertex;
+    uint32_t firstInstance;
+  };
+  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+
+  struct DrawIndexedIndirectCommand
+  {
+    DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstIndex_ = 0, int32_t vertexOffset_ = 0, uint32_t firstInstance_ = 0 )
+      : indexCount( indexCount_ )
+      , instanceCount( instanceCount_ )
+      , firstIndex( firstIndex_ )
+      , vertexOffset( vertexOffset_ )
+      , firstInstance( firstInstance_ )
+    {
+    }
+
+    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) );
+    }
+
+    DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) );
+      return *this;
+    }
+    DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ )
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ )
+    {
+      firstIndex = firstIndex_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ )
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+
+    operator const VkDrawIndexedIndirectCommand&() const
+    {
+      return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>(this);
+    }
+
+    bool operator==( DrawIndexedIndirectCommand const& rhs ) const
+    {
+      return ( indexCount == rhs.indexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstIndex == rhs.firstIndex )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( firstInstance == rhs.firstInstance );
+    }
+
+    bool operator!=( DrawIndexedIndirectCommand const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t indexCount;
+    uint32_t instanceCount;
+    uint32_t firstIndex;
+    int32_t vertexOffset;
+    uint32_t firstInstance;
+  };
+  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+
+  struct DispatchIndirectCommand
+  {
+    DispatchIndirectCommand( uint32_t x_ = 0, uint32_t y_ = 0, uint32_t z_ = 0 )
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {
+    }
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) );
+    }
+
+    DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) );
+      return *this;
+    }
+    DispatchIndirectCommand& setX( uint32_t x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    DispatchIndirectCommand& setY( uint32_t y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    DispatchIndirectCommand& setZ( uint32_t z_ )
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator const VkDispatchIndirectCommand&() const
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand*>(this);
+    }
+
+    bool operator==( DispatchIndirectCommand const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( DispatchIndirectCommand const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t x;
+    uint32_t y;
+    uint32_t z;
+  };
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
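+
+  // Usage sketch (illustrative comment, not part of the generated header): DrawIndirectCommand,
+  // DrawIndexedIndirectCommand and DispatchIndirectCommand mirror the exact data layout the GPU
+  // reads from a buffer for indirect draws/dispatches, so they can be written into mapped buffer
+  // memory directly, e.g. (mappedPtr: application-provided pointer to host-visible buffer memory)
+  //   vk::DispatchIndirectCommand cmd( 64, 1, 1 );     // dispatch 64 x 1 x 1 workgroups
+  //   std::memcpy( mappedPtr, &cmd, sizeof( cmd ) );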
+
+  struct DisplayPlanePropertiesKHR
+  {
+    operator const VkDisplayPlanePropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
+    }
+
+    bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
+    {
+      return ( currentDisplay == rhs.currentDisplay )
+          && ( currentStackIndex == rhs.currentStackIndex );
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayKHR currentDisplay;
+    uint32_t currentStackIndex;
+  };
+  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct DisplayModeParametersKHR
+  {
+    DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(), uint32_t refreshRate_ = 0 )
+      : visibleRegion( visibleRegion_ )
+      , refreshRate( refreshRate_ )
+    {
+    }
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
+    }
+
+    DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) );
+      return *this;
+    }
+    DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+
+    operator const VkDisplayModeParametersKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
+    }
+
+    bool operator==( DisplayModeParametersKHR const& rhs ) const
+    {
+      return ( visibleRegion == rhs.visibleRegion )
+          && ( refreshRate == rhs.refreshRate );
+    }
+
+    bool operator!=( DisplayModeParametersKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Extent2D visibleRegion;
+    uint32_t refreshRate;
+  };
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+
+  struct DisplayModePropertiesKHR
+  {
+    operator const VkDisplayModePropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
+    }
+
+    bool operator==( DisplayModePropertiesKHR const& rhs ) const
+    {
+      return ( displayMode == rhs.displayMode )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayModeKHR displayMode;
+    DisplayModeParametersKHR parameters;
+  };
+  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct RectLayerKHR
+  {
+    RectLayerKHR( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D(), uint32_t layer_ = 0 )
+      : offset( offset_ )
+      , extent( extent_ )
+      , layer( layer_ )
+    {
+    }
+
+    RectLayerKHR( VkRectLayerKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RectLayerKHR ) );
+    }
+
+    RectLayerKHR& operator=( VkRectLayerKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RectLayerKHR ) );
+      return *this;
+    }
+    RectLayerKHR& setOffset( Offset2D offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    RectLayerKHR& setExtent( Extent2D extent_ )
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    RectLayerKHR& setLayer( uint32_t layer_ )
+    {
+      layer = layer_;
+      return *this;
+    }
+
+    operator const VkRectLayerKHR&() const
+    {
+      return *reinterpret_cast<const VkRectLayerKHR*>(this);
+    }
+
+    bool operator==( RectLayerKHR const& rhs ) const
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( layer == rhs.layer );
+    }
+
+    bool operator!=( RectLayerKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Offset2D offset;
+    Extent2D extent;
+    uint32_t layer;
+  };
+  static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+
+  struct PresentRegionKHR
+  {
+    PresentRegionKHR( uint32_t rectangleCount_ = 0, const RectLayerKHR* pRectangles_ = nullptr )
+      : rectangleCount( rectangleCount_ )
+      , pRectangles( pRectangles_ )
+    {
+    }
+
+    PresentRegionKHR( VkPresentRegionKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentRegionKHR ) );
+    }
+
+    PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentRegionKHR ) );
+      return *this;
+    }
+    PresentRegionKHR& setRectangleCount( uint32_t rectangleCount_ )
+    {
+      rectangleCount = rectangleCount_;
+      return *this;
+    }
+
+    PresentRegionKHR& setPRectangles( const RectLayerKHR* pRectangles_ )
+    {
+      pRectangles = pRectangles_;
+      return *this;
+    }
+
+    operator const VkPresentRegionKHR&() const
+    {
+      return *reinterpret_cast<const VkPresentRegionKHR*>(this);
+    }
+
+    bool operator==( PresentRegionKHR const& rhs ) const
+    {
+      return ( rectangleCount == rhs.rectangleCount )
+          && ( pRectangles == rhs.pRectangles );
+    }
+
+    bool operator!=( PresentRegionKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t rectangleCount;
+    const RectLayerKHR* pRectangles;
+  };
+  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+
+  struct XYColorEXT
+  {
+    XYColorEXT( float x_ = 0, float y_ = 0 )
+      : x( x_ )
+      , y( y_ )
+    {
+    }
+
+    XYColorEXT( VkXYColorEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( XYColorEXT ) );
+    }
+
+    XYColorEXT& operator=( VkXYColorEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( XYColorEXT ) );
+      return *this;
+    }
+    XYColorEXT& setX( float x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    XYColorEXT& setY( float y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator const VkXYColorEXT&() const
+    {
+      return *reinterpret_cast<const VkXYColorEXT*>(this);
+    }
+
+    bool operator==( XYColorEXT const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( XYColorEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    float x;
+    float y;
+  };
+  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+
+  struct RefreshCycleDurationGOOGLE
+  {
+    RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = 0 )
+      : refreshDuration( refreshDuration_ )
+    {
+    }
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+    }
+
+    RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+      return *this;
+    }
+    RefreshCycleDurationGOOGLE& setRefreshDuration( uint64_t refreshDuration_ )
+    {
+      refreshDuration = refreshDuration_;
+      return *this;
+    }
+
+    operator const VkRefreshCycleDurationGOOGLE&() const
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>(this);
+    }
+
+    bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const
+    {
+      return ( refreshDuration == rhs.refreshDuration );
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint64_t refreshDuration;
+  };
+  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+
+  struct PastPresentationTimingGOOGLE
+  {
+    PastPresentationTimingGOOGLE( uint32_t presentID_ = 0, uint64_t desiredPresentTime_ = 0, uint64_t actualPresentTime_ = 0, uint64_t earliestPresentTime_ = 0, uint64_t presentMargin_ = 0 )
+      : presentID( presentID_ )
+      , desiredPresentTime( desiredPresentTime_ )
+      , actualPresentTime( actualPresentTime_ )
+      , earliestPresentTime( earliestPresentTime_ )
+      , presentMargin( presentMargin_ )
+    {
+    }
+
+    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PastPresentationTimingGOOGLE ) );
+    }
+
+    PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PastPresentationTimingGOOGLE ) );
+      return *this;
+    }
+    PastPresentationTimingGOOGLE& setPresentID( uint32_t presentID_ )
+    {
+      presentID = presentID_;
+      return *this;
+    }
+
+    PastPresentationTimingGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ )
+    {
+      desiredPresentTime = desiredPresentTime_;
+      return *this;
+    }
+
+    PastPresentationTimingGOOGLE& setActualPresentTime( uint64_t actualPresentTime_ )
+    {
+      actualPresentTime = actualPresentTime_;
+      return *this;
+    }
+
+    PastPresentationTimingGOOGLE& setEarliestPresentTime( uint64_t earliestPresentTime_ )
+    {
+      earliestPresentTime = earliestPresentTime_;
+      return *this;
+    }
+
+    PastPresentationTimingGOOGLE& setPresentMargin( uint64_t presentMargin_ )
+    {
+      presentMargin = presentMargin_;
+      return *this;
+    }
+
+    operator const VkPastPresentationTimingGOOGLE&() const
+    {
+      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>(this);
+    }
+
+    bool operator==( PastPresentationTimingGOOGLE const& rhs ) const
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime )
+          && ( actualPresentTime == rhs.actualPresentTime )
+          && ( earliestPresentTime == rhs.earliestPresentTime )
+          && ( presentMargin == rhs.presentMargin );
+    }
+
+    bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t presentID;
+    uint64_t desiredPresentTime;
+    uint64_t actualPresentTime;
+    uint64_t earliestPresentTime;
+    uint64_t presentMargin;
+  };
+  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+
+  struct PresentTimeGOOGLE
+  {
+    PresentTimeGOOGLE( uint32_t presentID_ = 0, uint64_t desiredPresentTime_ = 0 )
+      : presentID( presentID_ )
+      , desiredPresentTime( desiredPresentTime_ )
+    {
+    }
+
+    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) );
+    }
+
+    PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) );
+      return *this;
+    }
+    PresentTimeGOOGLE& setPresentID( uint32_t presentID_ )
+    {
+      presentID = presentID_;
+      return *this;
+    }
+
+    PresentTimeGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ )
+    {
+      desiredPresentTime = desiredPresentTime_;
+      return *this;
+    }
+
+    operator const VkPresentTimeGOOGLE&() const
+    {
+      return *reinterpret_cast<const VkPresentTimeGOOGLE*>(this);
+    }
+
+    bool operator==( PresentTimeGOOGLE const& rhs ) const
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime );
+    }
+
+    bool operator!=( PresentTimeGOOGLE const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t presentID;
+    uint64_t desiredPresentTime;
+  };
+  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
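+
+  // Usage sketch (illustrative comment, not part of the generated header): desiredPresentTime is
+  // expressed in nanoseconds; one PresentTimeGOOGLE entry per swapchain image is passed to the
+  // present call via a PresentTimesInfoGOOGLE structure (VK_GOOGLE_display_timing), e.g.
+  //   vk::PresentTimeGOOGLE timing( presentId, targetTimeNs );  // presentId / targetTimeNs: application-provided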
+
+  struct ViewportWScalingNV
+  {
+    ViewportWScalingNV( float xcoeff_ = 0, float ycoeff_ = 0 )
+      : xcoeff( xcoeff_ )
+      , ycoeff( ycoeff_ )
+    {
+    }
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ViewportWScalingNV ) );
+    }
+
+    ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ViewportWScalingNV ) );
+      return *this;
+    }
+    ViewportWScalingNV& setXcoeff( float xcoeff_ )
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    ViewportWScalingNV& setYcoeff( float ycoeff_ )
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+
+    operator const VkViewportWScalingNV&() const
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV*>(this);
+    }
+
+    bool operator==( ViewportWScalingNV const& rhs ) const
+    {
+      return ( xcoeff == rhs.xcoeff )
+          && ( ycoeff == rhs.ycoeff );
+    }
+
+    bool operator!=( ViewportWScalingNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    float xcoeff;
+    float ycoeff;
+  };
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+
+  enum class ImageLayout
+  {
+    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR
+  };
+
+  struct DescriptorImageInfo
+  {
+    DescriptorImageInfo( Sampler sampler_ = Sampler(), ImageView imageView_ = ImageView(), ImageLayout imageLayout_ = ImageLayout::eUndefined )
+      : sampler( sampler_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+    {
+    }
+
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorImageInfo ) );
+    }
+
+    DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorImageInfo ) );
+      return *this;
+    }
+    DescriptorImageInfo& setSampler( Sampler sampler_ )
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    DescriptorImageInfo& setImageView( ImageView imageView_ )
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ )
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    operator const VkDescriptorImageInfo&() const
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo*>(this);
+    }
+
+    bool operator==( DescriptorImageInfo const& rhs ) const
+    {
+      return ( sampler == rhs.sampler )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
+    }
+
+    bool operator!=( DescriptorImageInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Sampler sampler;
+    ImageView imageView;
+    ImageLayout imageLayout;
+  };
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
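+
+  // Usage sketch (illustrative comment, not part of the generated header): a combined image
+  // sampler descriptor is commonly described with a previously created sampler and image view,
+  // using the layout the image will be in when it is sampled, e.g.
+  //   vk::DescriptorImageInfo imageInfo( sampler, imageView, vk::ImageLayout::eShaderReadOnlyOptimal );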
+
+  struct AttachmentReference
+  {
+    AttachmentReference( uint32_t attachment_ = 0, ImageLayout layout_ = ImageLayout::eUndefined )
+      : attachment( attachment_ )
+      , layout( layout_ )
+    {
+    }
+
+    AttachmentReference( VkAttachmentReference const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AttachmentReference ) );
+    }
+
+    AttachmentReference& operator=( VkAttachmentReference const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AttachmentReference ) );
+      return *this;
+    }
+    AttachmentReference& setAttachment( uint32_t attachment_ )
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference& setLayout( ImageLayout layout_ )
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    operator const VkAttachmentReference&() const
+    {
+      return *reinterpret_cast<const VkAttachmentReference*>(this);
+    }
+
+    bool operator==( AttachmentReference const& rhs ) const
+    {
+      return ( attachment == rhs.attachment )
+          && ( layout == rhs.layout );
+    }
+
+    bool operator!=( AttachmentReference const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t attachment;
+    ImageLayout layout;
+  };
+  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+
+  enum class AttachmentLoadOp
+  {
+    eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
+    eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
+    eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
+  };
+
+  enum class AttachmentStoreOp
+  {
+    eStore = VK_ATTACHMENT_STORE_OP_STORE,
+    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
+  };
+
+  enum class ImageType
+  {
+    e1D = VK_IMAGE_TYPE_1D,
+    e2D = VK_IMAGE_TYPE_2D,
+    e3D = VK_IMAGE_TYPE_3D
+  };
+
+  enum class ImageTiling
+  {
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR
+  };
+
+  enum class ImageViewType
+  {
+    e1D = VK_IMAGE_VIEW_TYPE_1D,
+    e2D = VK_IMAGE_VIEW_TYPE_2D,
+    e3D = VK_IMAGE_VIEW_TYPE_3D,
+    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+  };
+
+  enum class CommandBufferLevel
+  {
+    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+  };
+
+  enum class ComponentSwizzle
+  {
+    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+    eZero = VK_COMPONENT_SWIZZLE_ZERO,
+    eOne = VK_COMPONENT_SWIZZLE_ONE,
+    eR = VK_COMPONENT_SWIZZLE_R,
+    eG = VK_COMPONENT_SWIZZLE_G,
+    eB = VK_COMPONENT_SWIZZLE_B,
+    eA = VK_COMPONENT_SWIZZLE_A
+  };
+
+  struct ComponentMapping
+  {
+    ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity )
+      : r( r_ )
+      , g( g_ )
+      , b( b_ )
+      , a( a_ )
+    {
+    }
+
+    ComponentMapping( VkComponentMapping const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ComponentMapping ) );
+    }
+
+    ComponentMapping& operator=( VkComponentMapping const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ComponentMapping ) );
+      return *this;
+    }
+    ComponentMapping& setR( ComponentSwizzle r_ )
+    {
+      r = r_;
+      return *this;
+    }
+
+    ComponentMapping& setG( ComponentSwizzle g_ )
+    {
+      g = g_;
+      return *this;
+    }
+
+    ComponentMapping& setB( ComponentSwizzle b_ )
+    {
+      b = b_;
+      return *this;
+    }
+
+    ComponentMapping& setA( ComponentSwizzle a_ )
+    {
+      a = a_;
+      return *this;
+    }
+
+    operator const VkComponentMapping&() const
+    {
+      return *reinterpret_cast<const VkComponentMapping*>(this);
+    }
+
+    bool operator==( ComponentMapping const& rhs ) const
+    {
+      return ( r == rhs.r )
+          && ( g == rhs.g )
+          && ( b == rhs.b )
+          && ( a == rhs.a );
+    }
+
+    bool operator!=( ComponentMapping const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ComponentSwizzle r;
+    ComponentSwizzle g;
+    ComponentSwizzle b;
+    ComponentSwizzle a;
+  };
+  static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
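+
+  // Usage sketch (illustrative comment, not part of the generated header): a default-constructed
+  // ComponentMapping keeps all four channels at eIdentity; a non-trivial mapping can, for example,
+  // swap the red and blue channels of an image view:
+  //   vk::ComponentMapping swapRB( vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
+  //                                vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA );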
+
+  enum class DescriptorType
+  {
+    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
+    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
+  };
+
+  struct DescriptorPoolSize
+  {
+    DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0 )
+      : type( type_ )
+      , descriptorCount( descriptorCount_ )
+    {
+    }
+
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorPoolSize ) );
+    }
+
+    DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorPoolSize ) );
+      return *this;
+    }
+    DescriptorPoolSize& setType( DescriptorType type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ )
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    operator const VkDescriptorPoolSize&() const
+    {
+      return *reinterpret_cast<const VkDescriptorPoolSize*>(this);
+    }
+
+    bool operator==( DescriptorPoolSize const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( DescriptorPoolSize const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DescriptorType type;
+    uint32_t descriptorCount;
+  };
+  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+
+  struct DescriptorUpdateTemplateEntryKHR
+  {
+    DescriptorUpdateTemplateEntryKHR( uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, size_t offset_ = 0, size_t stride_ = 0 )
+      : dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+      , descriptorType( descriptorType_ )
+      , offset( offset_ )
+      , stride( stride_ )
+    {
+    }
+
+    DescriptorUpdateTemplateEntryKHR( VkDescriptorUpdateTemplateEntryKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntryKHR ) );
+    }
+
+    DescriptorUpdateTemplateEntryKHR& operator=( VkDescriptorUpdateTemplateEntryKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntryKHR ) );
+      return *this;
+    }
+    DescriptorUpdateTemplateEntryKHR& setDstBinding( uint32_t dstBinding_ )
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntryKHR& setDstArrayElement( uint32_t dstArrayElement_ )
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntryKHR& setDescriptorCount( uint32_t descriptorCount_ )
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntryKHR& setDescriptorType( DescriptorType descriptorType_ )
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntryKHR& setOffset( size_t offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntryKHR& setStride( size_t stride_ )
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    operator const VkDescriptorUpdateTemplateEntryKHR&() const
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntryKHR*>(this);
+    }
+
+    bool operator==( DescriptorUpdateTemplateEntryKHR const& rhs ) const
+    {
+      return ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateEntryKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+    DescriptorType descriptorType;
+    size_t offset;
+    size_t stride;
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateEntryKHR ) == sizeof( VkDescriptorUpdateTemplateEntryKHR ), "struct and wrapper have different size!" );
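+
+  // Usage sketch (illustrative comment, not part of the generated header): a template entry
+  // describes where the data for one binding lives inside the application-provided update blob,
+  // e.g. an array of four DescriptorImageInfo elements starting at byte 0 with natural stride:
+  //   vk::DescriptorUpdateTemplateEntryKHR entry( /*dstBinding*/ 0, /*dstArrayElement*/ 0,
+  //       /*descriptorCount*/ 4, vk::DescriptorType::eCombinedImageSampler,
+  //       /*offset*/ 0, /*stride*/ sizeof( vk::DescriptorImageInfo ) );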
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP
+  };
+
+  enum class BorderColor
+  {
+    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
+  };
+
+  enum class PipelineBindPoint
+  {
+    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE
+  };
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32
+  };
+
+  enum class Filter
+  {
+    eNearest = VK_FILTER_NEAREST,
+    eLinear = VK_FILTER_LINEAR,
+    eCubicIMG = VK_FILTER_CUBIC_IMG
+  };
+
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
+  };
+
+  enum class CompareOp
+  {
+    eNever = VK_COMPARE_OP_NEVER,
+    eLess = VK_COMPARE_OP_LESS,
+    eEqual = VK_COMPARE_OP_EQUAL,
+    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+    eGreater = VK_COMPARE_OP_GREATER,
+    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+    eAlways = VK_COMPARE_OP_ALWAYS
+  };
+
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  enum class CullModeFlagBits
+  {
+    eNone = VK_CULL_MODE_NONE,
+    eFront = VK_CULL_MODE_FRONT_BIT,
+    eBack = VK_CULL_MODE_BACK_BIT,
+    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+  };
+
+  using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
+
+  VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
+  {
+    return CullModeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
+  {
+    return ~( CullModeFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+    };
+  };
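+
+  // Usage sketch (illustrative comment, not part of the generated header): the operators above
+  // combine individual bits into a CullModeFlags mask, e.g.
+  //   vk::CullModeFlags cull = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;
+  // which is equivalent to vk::CullModeFlagBits::eFrontAndBack.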
+
+  enum class FrontFace
+  {
+    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    eClockwise = VK_FRONT_FACE_CLOCKWISE
+  };
+
+  enum class BlendFactor
+  {
+    eZero = VK_BLEND_FACTOR_ZERO,
+    eOne = VK_BLEND_FACTOR_ONE,
+    eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+    eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+    eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+    eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+    eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+    eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+    eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+    eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+    eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+    eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+    eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+    eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+    eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+    eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+    eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+  };
+
+  enum class BlendOp
+  {
+    eAdd = VK_BLEND_OP_ADD,
+    eSubtract = VK_BLEND_OP_SUBTRACT,
+    eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
+    eMin = VK_BLEND_OP_MIN,
+    eMax = VK_BLEND_OP_MAX,
+    eZeroEXT = VK_BLEND_OP_ZERO_EXT,
+    eSrcEXT = VK_BLEND_OP_SRC_EXT,
+    eDstEXT = VK_BLEND_OP_DST_EXT,
+    eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
+    eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
+    eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
+    eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
+    eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
+    eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
+    eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
+    eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
+    eXorEXT = VK_BLEND_OP_XOR_EXT,
+    eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
+    eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
+    eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
+    eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
+    eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
+    eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
+    eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
+    eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
+    eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
+    eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
+    eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
+    eInvertEXT = VK_BLEND_OP_INVERT_EXT,
+    eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
+    eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
+    eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
+    eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
+    eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
+    ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
+    eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
+    eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
+    eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
+    eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
+    eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
+    ePlusEXT = VK_BLEND_OP_PLUS_EXT,
+    ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
+    ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
+    ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
+    eMinusEXT = VK_BLEND_OP_MINUS_EXT,
+    eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
+    eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
+    eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
+    eRedEXT = VK_BLEND_OP_RED_EXT,
+    eGreenEXT = VK_BLEND_OP_GREEN_EXT,
+    eBlueEXT = VK_BLEND_OP_BLUE_EXT
+  };
+
+  enum class StencilOp
+  {
+    eKeep = VK_STENCIL_OP_KEEP,
+    eZero = VK_STENCIL_OP_ZERO,
+    eReplace = VK_STENCIL_OP_REPLACE,
+    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+    eInvert = VK_STENCIL_OP_INVERT,
+    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+  };
+
+  struct StencilOpState
+  {
+    StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = 0, uint32_t writeMask_ = 0, uint32_t reference_ = 0 )
+      : failOp( failOp_ )
+      , passOp( passOp_ )
+      , depthFailOp( depthFailOp_ )
+      , compareOp( compareOp_ )
+      , compareMask( compareMask_ )
+      , writeMask( writeMask_ )
+      , reference( reference_ )
+    {
+    }
+
+    StencilOpState( VkStencilOpState const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( StencilOpState ) );
+    }
+
+    StencilOpState& operator=( VkStencilOpState const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( StencilOpState ) );
+      return *this;
+    }
+    StencilOpState& setFailOp( StencilOp failOp_ )
+    {
+      failOp = failOp_;
+      return *this;
+    }
+
+    StencilOpState& setPassOp( StencilOp passOp_ )
+    {
+      passOp = passOp_;
+      return *this;
+    }
+
+    StencilOpState& setDepthFailOp( StencilOp depthFailOp_ )
+    {
+      depthFailOp = depthFailOp_;
+      return *this;
+    }
+
+    StencilOpState& setCompareOp( CompareOp compareOp_ )
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    StencilOpState& setCompareMask( uint32_t compareMask_ )
+    {
+      compareMask = compareMask_;
+      return *this;
+    }
+
+    StencilOpState& setWriteMask( uint32_t writeMask_ )
+    {
+      writeMask = writeMask_;
+      return *this;
+    }
+
+    StencilOpState& setReference( uint32_t reference_ )
+    {
+      reference = reference_;
+      return *this;
+    }
+
+    operator const VkStencilOpState&() const
+    {
+      return *reinterpret_cast<const VkStencilOpState*>(this);
+    }
+
+    bool operator==( StencilOpState const& rhs ) const
+    {
+      return ( failOp == rhs.failOp )
+          && ( passOp == rhs.passOp )
+          && ( depthFailOp == rhs.depthFailOp )
+          && ( compareOp == rhs.compareOp )
+          && ( compareMask == rhs.compareMask )
+          && ( writeMask == rhs.writeMask )
+          && ( reference == rhs.reference );
+    }
+
+    bool operator!=( StencilOpState const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    StencilOp failOp;
+    StencilOp passOp;
+    StencilOp depthFailOp;
+    CompareOp compareOp;
+    uint32_t compareMask;
+    uint32_t writeMask;
+    uint32_t reference;
+  };
+  static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
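+
+  // Usage sketch (illustrative comment, not part of the generated header): the chained setters
+  // allow fluent construction of a stencil state, e.g. "always pass, write reference value 1":
+  //   vk::StencilOpState writeOne = vk::StencilOpState()
+  //     .setCompareOp( vk::CompareOp::eAlways )
+  //     .setPassOp( vk::StencilOp::eReplace )
+  //     .setWriteMask( 0xFF )
+  //     .setReference( 1 );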
+
+  enum class LogicOp
+  {
+    eClear = VK_LOGIC_OP_CLEAR,
+    eAnd = VK_LOGIC_OP_AND,
+    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
+    eCopy = VK_LOGIC_OP_COPY,
+    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
+    eNoOp = VK_LOGIC_OP_NO_OP,
+    eXor = VK_LOGIC_OP_XOR,
+    eOr = VK_LOGIC_OP_OR,
+    eNor = VK_LOGIC_OP_NOR,
+    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
+    eInvert = VK_LOGIC_OP_INVERT,
+    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
+    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
+    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
+    eNand = VK_LOGIC_OP_NAND,
+    eSet = VK_LOGIC_OP_SET
+  };
+
+  enum class InternalAllocationType
+  {
+    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+  };
+
+  enum class SystemAllocationScope
+  {
+    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+  };
+
+  enum class PhysicalDeviceType
+  {
+    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+  };
+
+  enum class VertexInputRate
+  {
+    eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
+    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+  };
+
+  struct VertexInputBindingDescription
+  {
+    VertexInputBindingDescription( uint32_t binding_ = 0, uint32_t stride_ = 0, VertexInputRate inputRate_ = VertexInputRate::eVertex )
+      : binding( binding_ )
+      , stride( stride_ )
+      , inputRate( inputRate_ )
+    {
+    }
+
+    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) );
+    }
+
+    VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) );
+      return *this;
+    }
+    VertexInputBindingDescription& setBinding( uint32_t binding_ )
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDescription& setStride( uint32_t stride_ )
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ )
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+
+    operator const VkVertexInputBindingDescription&() const
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription*>(this);
+    }
+
+    bool operator==( VertexInputBindingDescription const& rhs ) const
+    {
+      return ( binding == rhs.binding )
+          && ( stride == rhs.stride )
+          && ( inputRate == rhs.inputRate );
+    }
+
+    bool operator!=( VertexInputBindingDescription const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t binding;
+    uint32_t stride;
+    VertexInputRate inputRate;
+  };
+  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+
+  enum class Format
+  {
+    eUndefined = VK_FORMAT_UNDEFINED,
+    eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+    eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+    eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+    eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+    eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+    eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+    eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+    eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+    eR8Unorm = VK_FORMAT_R8_UNORM,
+    eR8Snorm = VK_FORMAT_R8_SNORM,
+    eR8Uscaled = VK_FORMAT_R8_USCALED,
+    eR8Sscaled = VK_FORMAT_R8_SSCALED,
+    eR8Uint = VK_FORMAT_R8_UINT,
+    eR8Sint = VK_FORMAT_R8_SINT,
+    eR8Srgb = VK_FORMAT_R8_SRGB,
+    eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+    eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+    eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+    eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+    eR8G8Uint = VK_FORMAT_R8G8_UINT,
+    eR8G8Sint = VK_FORMAT_R8G8_SINT,
+    eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+    eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+    eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+    eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+    eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+    eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+    eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+    eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+    eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+    eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+    eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+    eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+    eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+    eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+    eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+    eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+    eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+    eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+    eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+    eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+    eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+    eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+    eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+    eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+    eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+    eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+    eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+    eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+    eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+    eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+    eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+    eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+    eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+    eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+    eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+    eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+    eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+    eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+    eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+    eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+    eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+    eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+    eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+    eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+    eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+    eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+    eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+    eR16Unorm = VK_FORMAT_R16_UNORM,
+    eR16Snorm = VK_FORMAT_R16_SNORM,
+    eR16Uscaled = VK_FORMAT_R16_USCALED,
+    eR16Sscaled = VK_FORMAT_R16_SSCALED,
+    eR16Uint = VK_FORMAT_R16_UINT,
+    eR16Sint = VK_FORMAT_R16_SINT,
+    eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+    eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+    eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+    eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+    eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+    eR16G16Uint = VK_FORMAT_R16G16_UINT,
+    eR16G16Sint = VK_FORMAT_R16G16_SINT,
+    eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+    eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+    eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+    eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+    eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+    eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+    eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+    eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+    eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+    eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+    eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+    eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+    eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+    eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+    eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+    eR32Uint = VK_FORMAT_R32_UINT,
+    eR32Sint = VK_FORMAT_R32_SINT,
+    eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+    eR32G32Uint = VK_FORMAT_R32G32_UINT,
+    eR32G32Sint = VK_FORMAT_R32G32_SINT,
+    eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+    eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+    eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+    eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+    eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+    eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+    eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+    eR64Uint = VK_FORMAT_R64_UINT,
+    eR64Sint = VK_FORMAT_R64_SINT,
+    eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+    eR64G64Uint = VK_FORMAT_R64G64_UINT,
+    eR64G64Sint = VK_FORMAT_R64G64_SINT,
+    eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+    eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+    eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+    eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+    eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+    eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+    eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+    eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+    eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+    eD16Unorm = VK_FORMAT_D16_UNORM,
+    eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+    eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+    eS8Uint = VK_FORMAT_S8_UINT,
+    eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+    eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+    eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+    eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+    eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+    eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+    eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+    eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+    eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+    eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+    eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+    eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+    eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+    eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+    eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+    eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+    eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+    eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+    eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+    eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+    eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+    eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+    eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+    eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+    eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+    eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+    eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+    eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+    eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+    eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+    eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+    eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+    eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+    eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+    eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+    eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+    eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+    eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+    eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+    eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+    eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+    eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+    eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+    eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+    eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+    eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+    eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+    eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+    eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+    eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+    eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+    eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+    eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+    eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+    eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+    eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+    ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG
+  };
+
+  struct VertexInputAttributeDescription
+  {
+    VertexInputAttributeDescription( uint32_t location_ = 0, uint32_t binding_ = 0, Format format_ = Format::eUndefined, uint32_t offset_ = 0 )
+      : location( location_ )
+      , binding( binding_ )
+      , format( format_ )
+      , offset( offset_ )
+    {
+    }
+
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) );
+    }
+
+    VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) );
+      return *this;
+    }
+    VertexInputAttributeDescription& setLocation( uint32_t location_ )
+    {
+      location = location_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription& setBinding( uint32_t binding_ )
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription& setOffset( uint32_t offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator const VkVertexInputAttributeDescription&() const
+    {
+      return *reinterpret_cast<const VkVertexInputAttributeDescription*>(this);
+    }
+
+    bool operator==( VertexInputAttributeDescription const& rhs ) const
+    {
+      return ( location == rhs.location )
+          && ( binding == rhs.binding )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( VertexInputAttributeDescription const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t location;
+    uint32_t binding;
+    Format format;
+    uint32_t offset;
+  };
+  static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
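+  // Usage sketch (illustrative only; the location, binding, format and offset
+  // values are placeholders): an attribute can be filled through either the
+  // constructor or the fluent setters above, e.g.
+  //
+  //   vk::VertexInputAttributeDescription attr = vk::VertexInputAttributeDescription()
+  //     .setLocation( 0 )
+  //     .setBinding ( 0 )
+  //     .setFormat  ( vk::Format::eR32G32B32Sfloat )
+  //     .setOffset  ( 0 );
+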
+
+  enum class StructureType
+  {
+    eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+    eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+    eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+    eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+    eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+    eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+    eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+    eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+    eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+    eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+    eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+    eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+    eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+    eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+    eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+    eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+    ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+    ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+    ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+    ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+    ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+    ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+    ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+    ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+    ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+    ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+    ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+    eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+    eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+    ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+    eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+    eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+    eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+    eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+    eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+    eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+    eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+    eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+    eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+    eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+    eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+    eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+    eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+    eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+    eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+    eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+    eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+    eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+    ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+    eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+    eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+    eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
+    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
+    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
+    eMirSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR,
+    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
+    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
+    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+    eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+    eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+    eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+    eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+    eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+    eRenderPassMultiviewCreateInfoKHX = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHX,
+    ePhysicalDeviceMultiviewFeaturesKHX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHX,
+    ePhysicalDeviceMultiviewPropertiesKHX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHX,
+    eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
+    eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
+    eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
+    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
+    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
+    eMemoryAllocateFlagsInfoKHX = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHX,
+    eBindBufferMemoryInfoKHX = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX,
+    eBindImageMemoryInfoKHX = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHX,
+    eDeviceGroupRenderPassBeginInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHX,
+    eDeviceGroupCommandBufferBeginInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHX,
+    eDeviceGroupSubmitInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHX,
+    eDeviceGroupBindSparseInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHX,
+    eDeviceGroupPresentCapabilitiesKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHX,
+    eImageSwapchainCreateInfoKHX = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHX,
+    eBindImageMemorySwapchainInfoKHX = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHX,
+    eAcquireNextImageInfoKHX = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHX,
+    eDeviceGroupPresentInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHX,
+    eDeviceGroupSwapchainCreateInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHX,
+    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+    ePhysicalDeviceGroupPropertiesKHX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHX,
+    eDeviceGroupDeviceCreateInfoKHX = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHX,
+    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
+    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
+    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
+    eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+    eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
+    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
+    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+    eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
+    eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+    eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+    eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+    ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+    eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
+    eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
+    eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
+    eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
+    eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
+    eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
+    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+    eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT,
+    eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+    eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+    eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+    eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+    ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+    ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+    ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+    eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
+    eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
+    eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+    ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
+    eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
+    eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
+    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
+    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
+    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
+    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+    ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+    ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV
+  };
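+  // Note: the wrapper structs below initialize their private sType member to the
+  // matching StructureType value in their constructors, so application code
+  // normally does not need to reference this enum directly.
+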
+
+  struct ApplicationInfo
+  {
+    ApplicationInfo( const char* pApplicationName_ = nullptr, uint32_t applicationVersion_ = 0, const char* pEngineName_ = nullptr, uint32_t engineVersion_ = 0, uint32_t apiVersion_ = 0 )
+      : sType( StructureType::eApplicationInfo )
+      , pNext( nullptr )
+      , pApplicationName( pApplicationName_ )
+      , applicationVersion( applicationVersion_ )
+      , pEngineName( pEngineName_ )
+      , engineVersion( engineVersion_ )
+      , apiVersion( apiVersion_ )
+    {
+    }
+
+    ApplicationInfo( VkApplicationInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ApplicationInfo ) );
+    }
+
+    ApplicationInfo& operator=( VkApplicationInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ApplicationInfo ) );
+      return *this;
+    }
+    ApplicationInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ApplicationInfo& setPApplicationName( const char* pApplicationName_ )
+    {
+      pApplicationName = pApplicationName_;
+      return *this;
+    }
+
+    ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ )
+    {
+      applicationVersion = applicationVersion_;
+      return *this;
+    }
+
+    ApplicationInfo& setPEngineName( const char* pEngineName_ )
+    {
+      pEngineName = pEngineName_;
+      return *this;
+    }
+
+    ApplicationInfo& setEngineVersion( uint32_t engineVersion_ )
+    {
+      engineVersion = engineVersion_;
+      return *this;
+    }
+
+    ApplicationInfo& setApiVersion( uint32_t apiVersion_ )
+    {
+      apiVersion = apiVersion_;
+      return *this;
+    }
+
+    operator const VkApplicationInfo&() const
+    {
+      return *reinterpret_cast<const VkApplicationInfo*>(this);
+    }
+
+    bool operator==( ApplicationInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pApplicationName == rhs.pApplicationName )
+          && ( applicationVersion == rhs.applicationVersion )
+          && ( pEngineName == rhs.pEngineName )
+          && ( engineVersion == rhs.engineVersion )
+          && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    const char* pApplicationName;
+    uint32_t applicationVersion;
+    const char* pEngineName;
+    uint32_t engineVersion;
+    uint32_t apiVersion;
+  };
+  static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
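+  // Usage sketch (illustrative only; the names and version numbers below are
+  // placeholders):
+  //
+  //   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
+  //     .setPApplicationName( "MyApp" )
+  //     .setApplicationVersion( 1 )
+  //     .setPEngineName( "MyEngine" )
+  //     .setEngineVersion( 1 )
+  //     .setApiVersion( VK_MAKE_VERSION( 1, 0, 0 ) );
+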
+
+  struct DeviceQueueCreateInfo
+  {
+    DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueCount_ = 0, const float* pQueuePriorities_ = nullptr )
+      : sType( StructureType::eDeviceQueueCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , queueCount( queueCount_ )
+      , pQueuePriorities( pQueuePriorities_ )
+    {
+    }
+
+    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) );
+    }
+
+    DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) );
+      return *this;
+    }
+    DeviceQueueCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ )
+    {
+      queueCount = queueCount_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ )
+    {
+      pQueuePriorities = pQueuePriorities_;
+      return *this;
+    }
+
+    operator const VkDeviceQueueCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>(this);
+    }
+
+    bool operator==( DeviceQueueCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueCount == rhs.queueCount )
+          && ( pQueuePriorities == rhs.pQueuePriorities );
+    }
+
+    bool operator!=( DeviceQueueCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceQueueCreateFlags flags;
+    uint32_t queueFamilyIndex;
+    uint32_t queueCount;
+    const float* pQueuePriorities;
+  };
+  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+
+  struct DeviceCreateInfo
+  {
+    DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), uint32_t queueCreateInfoCount_ = 0, const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr, const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr )
+      : sType( StructureType::eDeviceCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , queueCreateInfoCount( queueCreateInfoCount_ )
+      , pQueueCreateInfos( pQueueCreateInfos_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+      , pEnabledFeatures( pEnabledFeatures_ )
+    {
+    }
+
+    DeviceCreateInfo( VkDeviceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceCreateInfo ) );
+    }
+
+    DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceCreateInfo ) );
+      return *this;
+    }
+    DeviceCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ )
+    {
+      queueCreateInfoCount = queueCreateInfoCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ )
+    {
+      pQueueCreateInfos = pQueueCreateInfos_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+    DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ )
+    {
+      pEnabledFeatures = pEnabledFeatures_;
+      return *this;
+    }
+
+    operator const VkDeviceCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkDeviceCreateInfo*>(this);
+    }
+
+    bool operator==( DeviceCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
+          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+    }
+
+    bool operator!=( DeviceCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceCreateFlags flags;
+    uint32_t queueCreateInfoCount;
+    const DeviceQueueCreateInfo* pQueueCreateInfos;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+    const PhysicalDeviceFeatures* pEnabledFeatures;
+  };
+  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
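+  // Usage sketch (illustrative only; the queue family index, priority and the
+  // physicalDevice handle are placeholders; createDevice refers to the
+  // PhysicalDevice member function declared elsewhere in this header):
+  //
+  //   float priority = 1.0f;
+  //   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
+  //     .setQueueFamilyIndex( 0 )
+  //     .setQueueCount      ( 1 )
+  //     .setPQueuePriorities( &priority );
+  //
+  //   vk::DeviceCreateInfo deviceInfo = vk::DeviceCreateInfo()
+  //     .setQueueCreateInfoCount( 1 )
+  //     .setPQueueCreateInfos   ( &queueInfo );
+  //
+  //   vk::Device device = physicalDevice.createDevice( deviceInfo );
+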
+
+  struct InstanceCreateInfo
+  {
+    InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(), const ApplicationInfo* pApplicationInfo_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr )
+      : sType( StructureType::eInstanceCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pApplicationInfo( pApplicationInfo_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+    {
+    }
+
+    InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
+    }
+
+    InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( InstanceCreateInfo ) );
+      return *this;
+    }
+    InstanceCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
+    {
+      pApplicationInfo = pApplicationInfo_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+    operator const VkInstanceCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
+    }
+
+    bool operator==( InstanceCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pApplicationInfo == rhs.pApplicationInfo )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+    }
+
+    bool operator!=( InstanceCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    InstanceCreateFlags flags;
+    const ApplicationInfo* pApplicationInfo;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+  };
+  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
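+  // Usage sketch (illustrative only; appInfo as in the ApplicationInfo example
+  // above, the extension list is a placeholder; createInstance refers to the free
+  // function declared elsewhere in this header):
+  //
+  //   const char* extensions[] = { VK_KHR_SURFACE_EXTENSION_NAME };
+  //
+  //   vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
+  //     .setPApplicationInfo       ( &appInfo )
+  //     .setEnabledExtensionCount  ( 1 )
+  //     .setPpEnabledExtensionNames( extensions );
+  //
+  //   vk::Instance instance = vk::createInstance( instanceInfo );
+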
+
+  struct MemoryAllocateInfo
+  {
+    MemoryAllocateInfo( DeviceSize allocationSize_ = 0, uint32_t memoryTypeIndex_ = 0 )
+      : sType( StructureType::eMemoryAllocateInfo )
+      , pNext( nullptr )
+      , allocationSize( allocationSize_ )
+      , memoryTypeIndex( memoryTypeIndex_ )
+    {
+    }
+
+    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) );
+    }
+
+    MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) );
+      return *this;
+    }
+    MemoryAllocateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ )
+    {
+      allocationSize = allocationSize_;
+      return *this;
+    }
+
+    MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ )
+    {
+      memoryTypeIndex = memoryTypeIndex_;
+      return *this;
+    }
+
+    operator const VkMemoryAllocateInfo&() const
+    {
+      return *reinterpret_cast<const VkMemoryAllocateInfo*>(this);
+    }
+
+    bool operator==( MemoryAllocateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeIndex == rhs.memoryTypeIndex );
+    }
+
+    bool operator!=( MemoryAllocateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceSize allocationSize;
+    uint32_t memoryTypeIndex;
+  };
+  static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
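+  // Usage sketch (illustrative only; the allocation size, memory type index and
+  // the device handle are placeholders; allocateMemory refers to the Device
+  // member function declared elsewhere in this header):
+  //
+  //   vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
+  //     .setAllocationSize ( 65536 )
+  //     .setMemoryTypeIndex( 0 );
+  //
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
+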
+
+  struct MappedMemoryRange
+  {
+    MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
+      : sType( StructureType::eMappedMemoryRange )
+      , pNext( nullptr )
+      , memory( memory_ )
+      , offset( offset_ )
+      , size( size_ )
+    {
+    }
+
+    MappedMemoryRange( VkMappedMemoryRange const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MappedMemoryRange ) );
+    }
+
+    MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MappedMemoryRange ) );
+      return *this;
+    }
+    MappedMemoryRange& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MappedMemoryRange& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MappedMemoryRange& setOffset( DeviceSize offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    MappedMemoryRange& setSize( DeviceSize size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator const VkMappedMemoryRange&() const
+    {
+      return *reinterpret_cast<const VkMappedMemoryRange*>(this);
+    }
+
+    bool operator==( MappedMemoryRange const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( MappedMemoryRange const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceMemory memory;
+    DeviceSize offset;
+    DeviceSize size;
+  };
+  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+
+  struct WriteDescriptorSet
+  {
+    WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, const DescriptorImageInfo* pImageInfo_ = nullptr, const DescriptorBufferInfo* pBufferInfo_ = nullptr, const BufferView* pTexelBufferView_ = nullptr )
+      : sType( StructureType::eWriteDescriptorSet )
+      , pNext( nullptr )
+      , dstSet( dstSet_ )
+      , dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+      , descriptorType( descriptorType_ )
+      , pImageInfo( pImageInfo_ )
+      , pBufferInfo( pBufferInfo_ )
+      , pTexelBufferView( pTexelBufferView_ )
+    {
+    }
+
+    WriteDescriptorSet( VkWriteDescriptorSet const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( WriteDescriptorSet ) );
+    }
+
+    WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( WriteDescriptorSet ) );
+      return *this;
+    }
+    WriteDescriptorSet& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ )
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ )
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ )
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ )
+    {
+      pImageInfo = pImageInfo_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ )
+    {
+      pBufferInfo = pBufferInfo_;
+      return *this;
+    }
+
+    WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ )
+    {
+      pTexelBufferView = pTexelBufferView_;
+      return *this;
+    }
+
+    operator const VkWriteDescriptorSet&() const
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSet*>(this);
+    }
+
+    bool operator==( WriteDescriptorSet const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( pImageInfo == rhs.pImageInfo )
+          && ( pBufferInfo == rhs.pBufferInfo )
+          && ( pTexelBufferView == rhs.pTexelBufferView );
+    }
+
+    bool operator!=( WriteDescriptorSet const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DescriptorSet dstSet;
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+    DescriptorType descriptorType;
+    const DescriptorImageInfo* pImageInfo;
+    const DescriptorBufferInfo* pBufferInfo;
+    const BufferView* pTexelBufferView;
+  };
+  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
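+  // Usage sketch (illustrative only; descriptorSet, bufferInfo and the device
+  // handle are placeholders; updateDescriptorSets refers to the Device member
+  // function declared elsewhere in this header):
+  //
+  //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
+  //     .setDstSet         ( descriptorSet )
+  //     .setDstBinding     ( 0 )
+  //     .setDescriptorCount( 1 )
+  //     .setDescriptorType ( vk::DescriptorType::eUniformBuffer )
+  //     .setPBufferInfo    ( &bufferInfo );
+  //
+  //   device.updateDescriptorSets( 1, &write, 0, nullptr );
+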
+
+  struct CopyDescriptorSet
+  {
+    CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), uint32_t srcBinding_ = 0, uint32_t srcArrayElement_ = 0, DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0 )
+      : sType( StructureType::eCopyDescriptorSet )
+      , pNext( nullptr )
+      , srcSet( srcSet_ )
+      , srcBinding( srcBinding_ )
+      , srcArrayElement( srcArrayElement_ )
+      , dstSet( dstSet_ )
+      , dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+    {
+    }
+
+    CopyDescriptorSet( VkCopyDescriptorSet const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CopyDescriptorSet ) );
+    }
+
+    CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CopyDescriptorSet ) );
+      return *this;
+    }
+    CopyDescriptorSet& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ )
+    {
+      srcSet = srcSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ )
+    {
+      srcBinding = srcBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ )
+    {
+      srcArrayElement = srcArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ )
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ )
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    operator const VkCopyDescriptorSet&() const
+    {
+      return *reinterpret_cast<const VkCopyDescriptorSet*>(this);
+    }
+
+    bool operator==( CopyDescriptorSet const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSet == rhs.srcSet )
+          && ( srcBinding == rhs.srcBinding )
+          && ( srcArrayElement == rhs.srcArrayElement )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( CopyDescriptorSet const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DescriptorSet srcSet;
+    uint32_t srcBinding;
+    uint32_t srcArrayElement;
+    DescriptorSet dstSet;
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+  };
+  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+
+  struct BufferViewCreateInfo
+  {
+    BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), Buffer buffer_ = Buffer(), Format format_ = Format::eUndefined, DeviceSize offset_ = 0, DeviceSize range_ = 0 )
+      : sType( StructureType::eBufferViewCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , buffer( buffer_ )
+      , format( format_ )
+      , offset( offset_ )
+      , range( range_ )
+    {
+    }
+
+    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) );
+    }
+
+    BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) );
+      return *this;
+    }
+    BufferViewCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferViewCreateInfo& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferViewCreateInfo& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    BufferViewCreateInfo& setOffset( DeviceSize offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferViewCreateInfo& setRange( DeviceSize range_ )
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator const VkBufferViewCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkBufferViewCreateInfo*>(this);
+    }
+
+    bool operator==( BufferViewCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( BufferViewCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    BufferViewCreateFlags flags;
+    Buffer buffer;
+    Format format;
+    DeviceSize offset;
+    DeviceSize range;
+  };
+  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+
+  struct ShaderModuleCreateInfo
+  {
+    ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), size_t codeSize_ = 0, const uint32_t* pCode_ = nullptr )
+      : sType( StructureType::eShaderModuleCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , codeSize( codeSize_ )
+      , pCode( pCode_ )
+    {
+    }
+
+    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) );
+    }
+
+    ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) );
+      return *this;
+    }
+    ShaderModuleCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ )
+    {
+      codeSize = codeSize_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ )
+    {
+      pCode = pCode_;
+      return *this;
+    }
+
+    operator const VkShaderModuleCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkShaderModuleCreateInfo*>(this);
+    }
+
+    bool operator==( ShaderModuleCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( codeSize == rhs.codeSize )
+          && ( pCode == rhs.pCode );
+    }
+
+    bool operator!=( ShaderModuleCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ShaderModuleCreateFlags flags;
+    size_t codeSize;
+    const uint32_t* pCode;
+  };
+  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
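+  // Usage sketch (illustrative only; spirvCode stands in for SPIR-V data already
+  // in memory and device is a placeholder handle; note that codeSize is given in
+  // bytes while pCode points at uint32_t words; createShaderModule refers to the
+  // Device member function declared elsewhere in this header):
+  //
+  //   std::vector<uint32_t> spirvCode = /* ... */;
+  //
+  //   vk::ShaderModuleCreateInfo moduleInfo = vk::ShaderModuleCreateInfo()
+  //     .setCodeSize( spirvCode.size() * sizeof(uint32_t) )
+  //     .setPCode   ( spirvCode.data() );
+  //
+  //   vk::ShaderModule module = device.createShaderModule( moduleInfo );
+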
+
+  struct DescriptorSetAllocateInfo
+  {
+    DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), uint32_t descriptorSetCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr )
+      : sType( StructureType::eDescriptorSetAllocateInfo )
+      , pNext( nullptr )
+      , descriptorPool( descriptorPool_ )
+      , descriptorSetCount( descriptorSetCount_ )
+      , pSetLayouts( pSetLayouts_ )
+    {
+    }
+
+    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) );
+    }
+
+    DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) );
+      return *this;
+    }
+    DescriptorSetAllocateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ )
+    {
+      descriptorPool = descriptorPool_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ )
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+    operator const VkDescriptorSetAllocateInfo&() const
+    {
+      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>(this);
+    }
+
+    bool operator==( DescriptorSetAllocateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorPool == rhs.descriptorPool )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pSetLayouts == rhs.pSetLayouts );
+    }
+
+    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DescriptorPool descriptorPool;
+    uint32_t descriptorSetCount;
+    const DescriptorSetLayout* pSetLayouts;
+  };
+  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+
+  struct PipelineVertexInputStateCreateInfo
+  {
+    PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), uint32_t vertexBindingDescriptionCount_ = 0, const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, uint32_t vertexAttributeDescriptionCount_ = 0, const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr )
+      : sType( StructureType::ePipelineVertexInputStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
+      , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
+      , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
+      , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+    {
+    }
+
+    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
+    }
+
+    PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) );
+      return *this;
+    }
+    PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ )
+    {
+      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ )
+    {
+      pVertexBindingDescriptions = pVertexBindingDescriptions_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ )
+    {
+      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ )
+    {
+      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+      return *this;
+    }
+
+    operator const VkPipelineVertexInputStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+          && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+          && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+          && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+    }
+
+    bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineVertexInputStateCreateFlags flags;
+    uint32_t vertexBindingDescriptionCount;
+    const VertexInputBindingDescription* pVertexBindingDescriptions;
+    uint32_t vertexAttributeDescriptionCount;
+    const VertexInputAttributeDescription* pVertexAttributeDescriptions;
+  };
+  static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
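+  // Usage sketch (illustrative only; bindingDesc is a placeholder
+  // VertexInputBindingDescription and attr is the attribute from the example
+  // further above):
+  //
+  //   vk::PipelineVertexInputStateCreateInfo vertexInput = vk::PipelineVertexInputStateCreateInfo()
+  //     .setVertexBindingDescriptionCount  ( 1 )
+  //     .setPVertexBindingDescriptions     ( &bindingDesc )
+  //     .setVertexAttributeDescriptionCount( 1 )
+  //     .setPVertexAttributeDescriptions   ( &attr );
+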
+
+  struct PipelineInputAssemblyStateCreateInfo
+  {
+    PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = 0 )
+      : sType( StructureType::ePipelineInputAssemblyStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , topology( topology_ )
+      , primitiveRestartEnable( primitiveRestartEnable_ )
+    {
+    }
+
+    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
+    }
+
+    PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) );
+      return *this;
+    }
+    PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ )
+    {
+      topology = topology_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ )
+    {
+      primitiveRestartEnable = primitiveRestartEnable_;
+      return *this;
+    }
+
+    operator const VkPipelineInputAssemblyStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( topology == rhs.topology )
+          && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+    }
+
+    bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineInputAssemblyStateCreateFlags flags;
+    PrimitiveTopology topology;
+    Bool32 primitiveRestartEnable;
+  };
+  static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+
+  struct PipelineTessellationStateCreateInfo
+  {
+    PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(), uint32_t patchControlPoints_ = 0 )
+      : sType( StructureType::ePipelineTessellationStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , patchControlPoints( patchControlPoints_ )
+    {
+    }
+
+    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+    }
+
+    PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) );
+      return *this;
+    }
+    PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
+    {
+      patchControlPoints = patchControlPoints_;
+      return *this;
+    }
+
+    operator const VkPipelineTessellationStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( patchControlPoints == rhs.patchControlPoints );
+    }
+
+    bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineTessellationStateCreateFlags flags;
+    uint32_t patchControlPoints;
+  };
+  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+
+  struct PipelineViewportStateCreateInfo
+  {
+    PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), uint32_t viewportCount_ = 0, const Viewport* pViewports_ = nullptr, uint32_t scissorCount_ = 0, const Rect2D* pScissors_ = nullptr )
+      : sType( StructureType::ePipelineViewportStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , viewportCount( viewportCount_ )
+      , pViewports( pViewports_ )
+      , scissorCount( scissorCount_ )
+      , pScissors( pScissors_ )
+    {
+    }
+
+    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) );
+    }
+
+    PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) );
+      return *this;
+    }
+    PipelineViewportStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ )
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ )
+    {
+      pViewports = pViewports_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ )
+    {
+      scissorCount = scissorCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ )
+    {
+      pScissors = pScissors_;
+      return *this;
+    }
+
+    operator const VkPipelineViewportStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineViewportStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewports == rhs.pViewports )
+          && ( scissorCount == rhs.scissorCount )
+          && ( pScissors == rhs.pScissors );
+    }
+
+    bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineViewportStateCreateFlags flags;
+    uint32_t viewportCount;
+    const Viewport* pViewports;
+    uint32_t scissorCount;
+    const Rect2D* pScissors;
+  };
+  static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
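+  // Usage sketch (illustrative): each count is paired with its array pointer;
+  // "viewport" and "scissor" below are assumed caller-provided objects.
+  //   Viewport viewport;
+  //   Rect2D   scissor;
+  //   PipelineViewportStateCreateInfo viewportState = PipelineViewportStateCreateInfo()
+  //     .setViewportCount( 1 ).setPViewports( &viewport )
+  //     .setScissorCount( 1 ).setPScissors( &scissor );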
+
+  struct PipelineRasterizationStateCreateInfo
+  {
+    PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), Bool32 depthClampEnable_ = 0, Bool32 rasterizerDiscardEnable_ = 0, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = CullModeFlags(), FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = 0, float depthBiasConstantFactor_ = 0, float depthBiasClamp_ = 0, float depthBiasSlopeFactor_ = 0, float lineWidth_ = 0 )
+      : sType( StructureType::ePipelineRasterizationStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , depthClampEnable( depthClampEnable_ )
+      , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
+      , polygonMode( polygonMode_ )
+      , cullMode( cullMode_ )
+      , frontFace( frontFace_ )
+      , depthBiasEnable( depthBiasEnable_ )
+      , depthBiasConstantFactor( depthBiasConstantFactor_ )
+      , depthBiasClamp( depthBiasClamp_ )
+      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
+      , lineWidth( lineWidth_ )
+    {
+    }
+
+    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
+    }
+
+    PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) );
+      return *this;
+    }
+    PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ )
+    {
+      depthClampEnable = depthClampEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ )
+    {
+      rasterizerDiscardEnable = rasterizerDiscardEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ )
+    {
+      polygonMode = polygonMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ )
+    {
+      cullMode = cullMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ )
+    {
+      frontFace = frontFace_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ )
+    {
+      depthBiasEnable = depthBiasEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ )
+    {
+      depthBiasConstantFactor = depthBiasConstantFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ )
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ )
+    {
+      depthBiasSlopeFactor = depthBiasSlopeFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ )
+    {
+      lineWidth = lineWidth_;
+      return *this;
+    }
+
+    operator const VkPipelineRasterizationStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClampEnable == rhs.depthClampEnable )
+          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+          && ( polygonMode == rhs.polygonMode )
+          && ( cullMode == rhs.cullMode )
+          && ( frontFace == rhs.frontFace )
+          && ( depthBiasEnable == rhs.depthBiasEnable )
+          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+          && ( lineWidth == rhs.lineWidth );
+    }
+
+    bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineRasterizationStateCreateFlags flags;
+    Bool32 depthClampEnable;
+    Bool32 rasterizerDiscardEnable;
+    PolygonMode polygonMode;
+    CullModeFlags cullMode;
+    FrontFace frontFace;
+    Bool32 depthBiasEnable;
+    float depthBiasConstantFactor;
+    float depthBiasClamp;
+    float depthBiasSlopeFactor;
+    float lineWidth;
+  };
+  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
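+  // Usage sketch (illustrative; the values shown are placeholders): fluent
+  // construction, with the conversion operator yielding the underlying C struct.
+  //   PipelineRasterizationStateCreateInfo rasterState = PipelineRasterizationStateCreateInfo()
+  //     .setPolygonMode( PolygonMode::eFill )
+  //     .setFrontFace( FrontFace::eCounterClockwise )
+  //     .setLineWidth( 1.0f );
+  //   const VkPipelineRasterizationStateCreateInfo& vkRasterState = rasterState;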
+
+  struct PipelineDepthStencilStateCreateInfo
+  {
+    PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), Bool32 depthTestEnable_ = 0, Bool32 depthWriteEnable_ = 0, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = 0, Bool32 stencilTestEnable_ = 0, StencilOpState front_ = StencilOpState(), StencilOpState back_ = StencilOpState(), float minDepthBounds_ = 0, float maxDepthBounds_ = 0 )
+      : sType( StructureType::ePipelineDepthStencilStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , depthTestEnable( depthTestEnable_ )
+      , depthWriteEnable( depthWriteEnable_ )
+      , depthCompareOp( depthCompareOp_ )
+      , depthBoundsTestEnable( depthBoundsTestEnable_ )
+      , stencilTestEnable( stencilTestEnable_ )
+      , front( front_ )
+      , back( back_ )
+      , minDepthBounds( minDepthBounds_ )
+      , maxDepthBounds( maxDepthBounds_ )
+    {
+    }
+
+    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
+    }
+
+    PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) );
+      return *this;
+    }
+    PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ )
+    {
+      depthTestEnable = depthTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ )
+    {
+      depthWriteEnable = depthWriteEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ )
+    {
+      depthCompareOp = depthCompareOp_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ )
+    {
+      depthBoundsTestEnable = depthBoundsTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ )
+    {
+      stencilTestEnable = stencilTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ )
+    {
+      front = front_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ )
+    {
+      back = back_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ )
+    {
+      minDepthBounds = minDepthBounds_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ )
+    {
+      maxDepthBounds = maxDepthBounds_;
+      return *this;
+    }
+
+    operator const VkPipelineDepthStencilStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthTestEnable == rhs.depthTestEnable )
+          && ( depthWriteEnable == rhs.depthWriteEnable )
+          && ( depthCompareOp == rhs.depthCompareOp )
+          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+          && ( stencilTestEnable == rhs.stencilTestEnable )
+          && ( front == rhs.front )
+          && ( back == rhs.back )
+          && ( minDepthBounds == rhs.minDepthBounds )
+          && ( maxDepthBounds == rhs.maxDepthBounds );
+    }
+
+    bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineDepthStencilStateCreateFlags flags;
+    Bool32 depthTestEnable;
+    Bool32 depthWriteEnable;
+    CompareOp depthCompareOp;
+    Bool32 depthBoundsTestEnable;
+    Bool32 stencilTestEnable;
+    StencilOpState front;
+    StencilOpState back;
+    float minDepthBounds;
+    float maxDepthBounds;
+  };
+  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
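+  // Usage sketch (illustrative; placeholder values only):
+  //   PipelineDepthStencilStateCreateInfo depthState = PipelineDepthStencilStateCreateInfo()
+  //     .setDepthTestEnable( 1 )
+  //     .setDepthWriteEnable( 1 )
+  //     .setDepthCompareOp( CompareOp::eNever );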
+
+  struct PipelineCacheCreateInfo
+  {
+    PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr )
+      : sType( StructureType::ePipelineCacheCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {
+    }
+
+    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) );
+    }
+
+    PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) );
+      return *this;
+    }
+    PipelineCacheCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ )
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ )
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+    operator const VkPipelineCacheCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineCacheCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineCacheCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( PipelineCacheCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineCacheCreateFlags flags;
+    size_t initialDataSize;
+    const void* pInitialData;
+  };
+  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
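+  // Usage sketch (illustrative): "cacheBlob" is an assumed byte container
+  // holding previously retrieved pipeline cache data.
+  //   PipelineCacheCreateInfo cacheInfo = PipelineCacheCreateInfo()
+  //     .setInitialDataSize( cacheBlob.size() )
+  //     .setPInitialData( cacheBlob.data() );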
+
+  struct SamplerCreateInfo
+  {
+    SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = 0, Bool32 anisotropyEnable_ = 0, float maxAnisotropy_ = 0, Bool32 compareEnable_ = 0, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = 0, float maxLod_ = 0, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = 0 )
+      : sType( StructureType::eSamplerCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , magFilter( magFilter_ )
+      , minFilter( minFilter_ )
+      , mipmapMode( mipmapMode_ )
+      , addressModeU( addressModeU_ )
+      , addressModeV( addressModeV_ )
+      , addressModeW( addressModeW_ )
+      , mipLodBias( mipLodBias_ )
+      , anisotropyEnable( anisotropyEnable_ )
+      , maxAnisotropy( maxAnisotropy_ )
+      , compareEnable( compareEnable_ )
+      , compareOp( compareOp_ )
+      , minLod( minLod_ )
+      , maxLod( maxLod_ )
+      , borderColor( borderColor_ )
+      , unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {
+    }
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SamplerCreateInfo ) );
+    }
+
+    SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SamplerCreateInfo ) );
+      return *this;
+    }
+    SamplerCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMagFilter( Filter magFilter_ )
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMinFilter( Filter minFilter_ )
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ )
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ )
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ )
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ )
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMipLodBias( float mipLodBias_ )
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ )
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ )
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ )
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setCompareOp( CompareOp compareOp_ )
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMinLod( float minLod_ )
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setMaxLod( float maxLod_ )
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setBorderColor( BorderColor borderColor_ )
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ )
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+
+    operator const VkSamplerCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo*>(this);
+    }
+
+    bool operator==( SamplerCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( magFilter == rhs.magFilter )
+          && ( minFilter == rhs.minFilter )
+          && ( mipmapMode == rhs.mipmapMode )
+          && ( addressModeU == rhs.addressModeU )
+          && ( addressModeV == rhs.addressModeV )
+          && ( addressModeW == rhs.addressModeW )
+          && ( mipLodBias == rhs.mipLodBias )
+          && ( anisotropyEnable == rhs.anisotropyEnable )
+          && ( maxAnisotropy == rhs.maxAnisotropy )
+          && ( compareEnable == rhs.compareEnable )
+          && ( compareOp == rhs.compareOp )
+          && ( minLod == rhs.minLod )
+          && ( maxLod == rhs.maxLod )
+          && ( borderColor == rhs.borderColor )
+          && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+    }
+
+    bool operator!=( SamplerCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SamplerCreateFlags flags;
+    Filter magFilter;
+    Filter minFilter;
+    SamplerMipmapMode mipmapMode;
+    SamplerAddressMode addressModeU;
+    SamplerAddressMode addressModeV;
+    SamplerAddressMode addressModeW;
+    float mipLodBias;
+    Bool32 anisotropyEnable;
+    float maxAnisotropy;
+    Bool32 compareEnable;
+    CompareOp compareOp;
+    float minLod;
+    float maxLod;
+    BorderColor borderColor;
+    Bool32 unnormalizedCoordinates;
+  };
+  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
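+  // Usage sketch (illustrative; values shown are the defaults above): chained
+  // setters plus the conversion operator to the corresponding VkSamplerCreateInfo.
+  //   SamplerCreateInfo samplerInfo = SamplerCreateInfo()
+  //     .setMagFilter( Filter::eNearest )
+  //     .setAddressModeU( SamplerAddressMode::eRepeat )
+  //     .setBorderColor( BorderColor::eFloatTransparentBlack );
+  //   const VkSamplerCreateInfo& vkSamplerInfo = samplerInfo;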
+
+  struct CommandBufferAllocateInfo
+  {
+    CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = 0 )
+      : sType( StructureType::eCommandBufferAllocateInfo )
+      , pNext( nullptr )
+      , commandPool( commandPool_ )
+      , level( level_ )
+      , commandBufferCount( commandBufferCount_ )
+    {
+    }
+
+    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) );
+    }
+
+    CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) );
+      return *this;
+    }
+    CommandBufferAllocateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ )
+    {
+      commandPool = commandPool_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ )
+    {
+      level = level_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    operator const VkCommandBufferAllocateInfo&() const
+    {
+      return *reinterpret_cast<const VkCommandBufferAllocateInfo*>(this);
+    }
+
+    bool operator==( CommandBufferAllocateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( commandPool == rhs.commandPool )
+          && ( level == rhs.level )
+          && ( commandBufferCount == rhs.commandBufferCount );
+    }
+
+    bool operator!=( CommandBufferAllocateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    CommandPool commandPool;
+    CommandBufferLevel level;
+    uint32_t commandBufferCount;
+  };
+  static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
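+  // Usage sketch (illustrative): "commandPool" is an assumed pre-created handle.
+  //   CommandBufferAllocateInfo allocInfo = CommandBufferAllocateInfo()
+  //     .setCommandPool( commandPool )
+  //     .setLevel( CommandBufferLevel::ePrimary )
+  //     .setCommandBufferCount( 1 );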
+
+  struct RenderPassBeginInfo
+  {
+    RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), Framebuffer framebuffer_ = Framebuffer(), Rect2D renderArea_ = Rect2D(), uint32_t clearValueCount_ = 0, const ClearValue* pClearValues_ = nullptr )
+      : sType( StructureType::eRenderPassBeginInfo )
+      , pNext( nullptr )
+      , renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( clearValueCount_ )
+      , pClearValues( pClearValues_ )
+    {
+    }
+
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) );
+    }
+
+    RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) );
+      return *this;
+    }
+    RenderPassBeginInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ )
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ )
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ )
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ )
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ )
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+    operator const VkRenderPassBeginInfo&() const
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo*>(this);
+    }
+
+    bool operator==( RenderPassBeginInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( renderArea == rhs.renderArea )
+          && ( clearValueCount == rhs.clearValueCount )
+          && ( pClearValues == rhs.pClearValues );
+    }
+
+    bool operator!=( RenderPassBeginInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RenderPass renderPass;
+    Framebuffer framebuffer;
+    Rect2D renderArea;
+    uint32_t clearValueCount;
+    const ClearValue* pClearValues;
+  };
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
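+  // Usage sketch (illustrative): "renderPass" and "framebuffer" are assumed
+  // pre-created handles; clearValueCount must describe the array behind pClearValues.
+  //   RenderPassBeginInfo beginInfo = RenderPassBeginInfo()
+  //     .setRenderPass( renderPass )
+  //     .setFramebuffer( framebuffer )
+  //     .setRenderArea( Rect2D() );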
+
+  struct EventCreateInfo
+  {
+    EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() )
+      : sType( StructureType::eEventCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+    {
+    }
+
+    EventCreateInfo( VkEventCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( EventCreateInfo ) );
+    }
+
+    EventCreateInfo& operator=( VkEventCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( EventCreateInfo ) );
+      return *this;
+    }
+    EventCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    EventCreateInfo& setFlags( EventCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkEventCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkEventCreateInfo*>(this);
+    }
+
+    bool operator==( EventCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( EventCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    EventCreateFlags flags;
+  };
+  static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+
+  struct SemaphoreCreateInfo
+  {
+    SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() )
+      : sType( StructureType::eSemaphoreCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+    {
+    }
+
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) );
+    }
+
+    SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) );
+      return *this;
+    }
+    SemaphoreCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkSemaphoreCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkSemaphoreCreateInfo*>(this);
+    }
+
+    bool operator==( SemaphoreCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SemaphoreCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SemaphoreCreateFlags flags;
+  };
+  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+
+  struct FramebufferCreateInfo
+  {
+    FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), RenderPass renderPass_ = RenderPass(), uint32_t attachmentCount_ = 0, const ImageView* pAttachments_ = nullptr, uint32_t width_ = 0, uint32_t height_ = 0, uint32_t layers_ = 0 )
+      : sType( StructureType::eFramebufferCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , renderPass( renderPass_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , width( width_ )
+      , height( height_ )
+      , layers( layers_ )
+    {
+    }
+
+    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) );
+    }
+
+    FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) );
+      return *this;
+    }
+    FramebufferCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ )
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ )
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setWidth( uint32_t width_ )
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setHeight( uint32_t height_ )
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferCreateInfo& setLayers( uint32_t layers_ )
+    {
+      layers = layers_;
+      return *this;
+    }
+
+    operator const VkFramebufferCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkFramebufferCreateInfo*>(this);
+    }
+
+    bool operator==( FramebufferCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( renderPass == rhs.renderPass )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layers == rhs.layers );
+    }
+
+    bool operator!=( FramebufferCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    FramebufferCreateFlags flags;
+    RenderPass renderPass;
+    uint32_t attachmentCount;
+    const ImageView* pAttachments;
+    uint32_t width;
+    uint32_t height;
+    uint32_t layers;
+  };
+  static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
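+  // Usage sketch (illustrative): "renderPass" and "colorView" are assumed
+  // pre-created handles; attachmentCount must match the pAttachments array.
+  //   ImageView attachments[] = { colorView };
+  //   FramebufferCreateInfo fbInfo = FramebufferCreateInfo()
+  //     .setRenderPass( renderPass )
+  //     .setAttachmentCount( 1 ).setPAttachments( attachments )
+  //     .setWidth( 1280 ).setHeight( 720 ).setLayers( 1 );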
+
+  struct DisplayModeCreateInfoKHR
+  {
+    DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() )
+      : sType( StructureType::eDisplayModeCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , parameters( parameters_ )
+    {
+    }
+
+    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) );
+    }
+
+    DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) );
+      return *this;
+    }
+    DisplayModeCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ )
+    {
+      parameters = parameters_;
+      return *this;
+    }
+
+    operator const VkDisplayModeCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>(this);
+    }
+
+    bool operator==( DisplayModeCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DisplayModeCreateFlagsKHR flags;
+    DisplayModeParametersKHR parameters;
+  };
+  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  struct DisplayPresentInfoKHR
+  {
+    DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), Rect2D dstRect_ = Rect2D(), Bool32 persistent_ = 0 )
+      : sType( StructureType::eDisplayPresentInfoKHR )
+      , pNext( nullptr )
+      , srcRect( srcRect_ )
+      , dstRect( dstRect_ )
+      , persistent( persistent_ )
+    {
+    }
+
+    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) );
+    }
+
+    DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) );
+      return *this;
+    }
+    DisplayPresentInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ )
+    {
+      srcRect = srcRect_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ )
+    {
+      dstRect = dstRect_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ )
+    {
+      persistent = persistent_;
+      return *this;
+    }
+
+    operator const VkDisplayPresentInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayPresentInfoKHR*>(this);
+    }
+
+    bool operator==( DisplayPresentInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcRect == rhs.srcRect )
+          && ( dstRect == rhs.dstRect )
+          && ( persistent == rhs.persistent );
+    }
+
+    bool operator!=( DisplayPresentInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Rect2D srcRect;
+    Rect2D dstRect;
+    Bool32 persistent;
+  };
+  static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidSurfaceCreateInfoKHR
+  {
+    AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), ANativeWindow* window_ = nullptr )
+      : sType( StructureType::eAndroidSurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , window( window_ )
+    {
+    }
+
+    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
+    }
+
+    AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR& setWindow( ANativeWindow* window_ )
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator const VkAndroidSurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    AndroidSurfaceCreateFlagsKHR flags;
+    ANativeWindow* window;
+  };
+  static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  struct MirSurfaceCreateInfoKHR
+  {
+    MirSurfaceCreateInfoKHR( MirSurfaceCreateFlagsKHR flags_ = MirSurfaceCreateFlagsKHR(), MirConnection* connection_ = nullptr, MirSurface* mirSurface_ = nullptr )
+      : sType( StructureType::eMirSurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , connection( connection_ )
+      , mirSurface( mirSurface_ )
+    {
+    }
+
+    MirSurfaceCreateInfoKHR( VkMirSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MirSurfaceCreateInfoKHR ) );
+    }
+
+    MirSurfaceCreateInfoKHR& operator=( VkMirSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MirSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    MirSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MirSurfaceCreateInfoKHR& setFlags( MirSurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MirSurfaceCreateInfoKHR& setConnection( MirConnection* connection_ )
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    MirSurfaceCreateInfoKHR& setMirSurface( MirSurface* mirSurface_ )
+    {
+      mirSurface = mirSurface_;
+      return *this;
+    }
+
+    operator const VkMirSurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( MirSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( connection == rhs.connection )
+          && ( mirSurface == rhs.mirSurface );
+    }
+
+    bool operator!=( MirSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    MirSurfaceCreateFlagsKHR flags;
+    MirConnection* connection;
+    MirSurface* mirSurface;
+  };
+  static_assert( sizeof( MirSurfaceCreateInfoKHR ) == sizeof( VkMirSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  struct ViSurfaceCreateInfoNN
+  {
+    ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(), void* window_ = nullptr )
+      : sType( StructureType::eViSurfaceCreateInfoNN )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , window( window_ )
+    {
+    }
+
+    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) );
+    }
+
+    ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) );
+      return *this;
+    }
+    ViSurfaceCreateInfoNN& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN& setWindow( void* window_ )
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator const VkViSurfaceCreateInfoNN&() const
+    {
+      return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>(this);
+    }
+
+    bool operator==( ViSurfaceCreateInfoNN const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ViSurfaceCreateFlagsNN flags;
+    void* window;
+  };
+  static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), struct wl_display* display_ = nullptr, struct wl_surface* surface_ = nullptr )
+      : sType( StructureType::eWaylandSurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , display( display_ )
+      , surface( surface_ )
+    {
+    }
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
+    }
+
+    WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ )
+    {
+      display = display_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ )
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    operator const VkWaylandSurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( display == rhs.display )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    WaylandSurfaceCreateFlagsKHR flags;
+    struct wl_display* display;
+    struct wl_surface* surface;
+  };
+  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32SurfaceCreateInfoKHR
+  {
+    Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), HINSTANCE hinstance_ = 0, HWND hwnd_ = 0 )
+      : sType( StructureType::eWin32SurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , hinstance( hinstance_ )
+      , hwnd( hwnd_ )
+    {
+    }
+
+    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
+    }
+
+    Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ )
+    {
+      hinstance = hinstance_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ )
+    {
+      hwnd = hwnd_;
+      return *this;
+    }
+
+    operator const VkWin32SurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( hinstance == rhs.hinstance )
+          && ( hwnd == rhs.hwnd );
+    }
+
+    bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Win32SurfaceCreateFlagsKHR flags;
+    HINSTANCE hinstance;
+    HWND hwnd;
+  };
+  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
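+  // Usage sketch (illustrative): "hInstance" and "hWnd" are assumed valid Win32
+  // handles for the target window.
+  //   Win32SurfaceCreateInfoKHR surfaceInfo = Win32SurfaceCreateInfoKHR()
+  //     .setHinstance( hInstance )
+  //     .setHwnd( hWnd );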
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  struct XlibSurfaceCreateInfoKHR
+  {
+    XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), Display* dpy_ = nullptr, Window window_ = 0 )
+      : sType( StructureType::eXlibSurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , dpy( dpy_ )
+      , window( window_ )
+    {
+    }
+
+    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
+    }
+
+    XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ )
+    {
+      dpy = dpy_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR& setWindow( Window window_ )
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator const VkXlibSurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dpy == rhs.dpy )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    XlibSurfaceCreateFlagsKHR flags;
+    Display* dpy;
+    Window window;
+  };
+  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  struct XcbSurfaceCreateInfoKHR
+  {
+    XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), xcb_connection_t* connection_ = nullptr, xcb_window_t window_ = 0 )
+      : sType( StructureType::eXcbSurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , connection( connection_ )
+      , window( window_ )
+    {
+    }
+
+    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
+    }
+
+    XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ )
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ )
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator const VkXcbSurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( connection == rhs.connection )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    XcbSurfaceCreateFlagsKHR flags;
+    xcb_connection_t* connection;
+    xcb_window_t window;
+  };
+  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+  struct DebugMarkerMarkerInfoEXT
+  {
+    DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
+      : sType( StructureType::eDebugMarkerMarkerInfoEXT )
+      , pNext( nullptr )
+      , pMarkerName( pMarkerName_ )
+    {
+      memcpy( &color, color_.data(), 4 * sizeof( float ) );
+    }
+
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
+    }
+
+    DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) );
+      return *this;
+    }
+    DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ )
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT& setColor( std::array<float,4> color_ )
+    {
+      memcpy( &color, color_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator const VkDebugMarkerMarkerInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>(this);
+    }
+
+    bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pMarkerName == rhs.pMarkerName )
+          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    const char* pMarkerName;
+    float color[4];
+  };
+  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
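+  // Usage sketch (illustrative): the four-component color is copied from the
+  // std::array argument; the marker name string is a placeholder.
+  //   DebugMarkerMarkerInfoEXT marker( "frame", { { 1.0f, 0.0f, 0.0f, 1.0f } } );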
+
+  struct DedicatedAllocationImageCreateInfoNV
+  {
+    DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
+      : sType( StructureType::eDedicatedAllocationImageCreateInfoNV )
+      , pNext( nullptr )
+      , dedicatedAllocation( dedicatedAllocation_ )
+    {
+    }
+
+    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
+    }
+
+    DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) );
+      return *this;
+    }
+    DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+
+    operator const VkDedicatedAllocationImageCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>(this);
+    }
+
+    bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+    }
+
+    bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Bool32 dedicatedAllocation;
+  };
+  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+
+  struct DedicatedAllocationBufferCreateInfoNV
+  {
+    DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
+      : sType( StructureType::eDedicatedAllocationBufferCreateInfoNV )
+      , pNext( nullptr )
+      , dedicatedAllocation( dedicatedAllocation_ )
+    {
+    }
+
+    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
+    }
+
+    DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) );
+      return *this;
+    }
+    DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+
+    operator const VkDedicatedAllocationBufferCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>(this);
+    }
+
+    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+    }
+
+    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Bool32 dedicatedAllocation;
+  };
+  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), Buffer buffer_ = Buffer() )
+      : sType( StructureType::eDedicatedAllocationMemoryAllocateInfoNV )
+      , pNext( nullptr )
+      , image( image_ )
+      , buffer( buffer_ )
+    {
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) );
+      return *this;
+    }
+    DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator const VkDedicatedAllocationMemoryAllocateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
+    }
+
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Image image;
+    Buffer buffer;
+  };
+  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
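+
+  // Illustrative sketch of how the three NV_dedicated_allocation structs above are
+  // chained through pNext (the ImageCreateInfo 'imageInfo', MemoryAllocateInfo
+  // 'allocInfo' and Image 'image' are assumed to be set up elsewhere):
+  //
+  //   DedicatedAllocationImageCreateInfoNV dedicatedImage( VK_TRUE );
+  //   imageInfo.setPNext( &dedicatedImage );        // request a dedicated image
+  //
+  //   DedicatedAllocationMemoryAllocateInfoNV dedicatedAlloc;
+  //   dedicatedAlloc.setImage( image );             // tie the allocation to that image
+  //   allocInfo.setPNext( &dedicatedAlloc );        // allocate memory exclusively for it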
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportMemoryWin32HandleInfoNV
+  {
+    ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0 )
+      : sType( StructureType::eExportMemoryWin32HandleInfoNV )
+      , pNext( nullptr )
+      , pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+    {
+    }
+
+    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
+    }
+
+    ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) );
+      return *this;
+    }
+    ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ )
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    operator const VkExportMemoryWin32HandleInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>(this);
+    }
+
+    bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoNV
+  {
+    Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
+      : sType( StructureType::eWin32KeyedMutexAcquireReleaseInfoNV )
+      , pNext( nullptr )
+      , acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) );
+      return *this;
+    }
+    Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ )
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ )
+    {
+      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ )
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+    operator const VkWin32KeyedMutexAcquireReleaseInfoNV&() const
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
+    }
+
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t acquireCount;
+    const DeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeoutMilliseconds;
+    uint32_t releaseCount;
+    const DeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct DeviceGeneratedCommandsFeaturesNVX
+  {
+    DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
+      : sType( StructureType::eDeviceGeneratedCommandsFeaturesNVX )
+      , pNext( nullptr )
+      , computeBindingPointSupport( computeBindingPointSupport_ )
+    {
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) );
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) );
+      return *this;
+    }
+    DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
+    {
+      computeBindingPointSupport = computeBindingPointSupport_;
+      return *this;
+    }
+
+    operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
+    }
+
+    bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
+    }
+
+    bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Bool32 computeBindingPointSupport;
+  };
+  static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
+
+  struct DeviceGeneratedCommandsLimitsNVX
+  {
+    DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
+      : sType( StructureType::eDeviceGeneratedCommandsLimitsNVX )
+      , pNext( nullptr )
+      , maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
+      , maxObjectEntryCounts( maxObjectEntryCounts_ )
+      , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
+      , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
+      , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
+    {
+    }
+
+    DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) );
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) );
+      return *this;
+    }
+    DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
+    {
+      maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
+    {
+      maxObjectEntryCounts = maxObjectEntryCounts_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
+    {
+      minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
+    {
+      minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
+    {
+      minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
+      return *this;
+    }
+
+    operator const VkDeviceGeneratedCommandsLimitsNVX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
+    }
+
+    bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
+          && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
+          && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
+          && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
+          && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
+    }
+
+    bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t maxIndirectCommandsLayoutTokenCount;
+    uint32_t maxObjectEntryCounts;
+    uint32_t minSequenceCountBufferOffsetAlignment;
+    uint32_t minSequenceIndexBufferOffsetAlignment;
+    uint32_t minCommandsTokenBufferOffsetAlignment;
+  };
+  static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
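+
+  // Both NVX structs above are filled in by the implementation; a minimal query
+  // sketch (assuming VK_NVX_device_generated_commands is available) passes
+  // default-constructed instances to the wrapper around
+  // vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX:
+  //
+  //   DeviceGeneratedCommandsFeaturesNVX features;
+  //   DeviceGeneratedCommandsLimitsNVX limits;
+  //   physicalDevice.getGeneratedCommandsPropertiesNVX( &features, &limits );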
+
+  struct CmdReserveSpaceForCommandsInfoNVX
+  {
+    CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
+      : sType( StructureType::eCmdReserveSpaceForCommandsInfoNVX )
+      , pNext( nullptr )
+      , objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , maxSequencesCount( maxSequencesCount_ )
+    {
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) );
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) );
+      return *this;
+    }
+    CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
+    {
+      objectTable = objectTable_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
+    {
+      return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
+    }
+
+    bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( maxSequencesCount == rhs.maxSequencesCount );
+    }
+
+    bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ObjectTableNVX objectTable;
+    IndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t maxSequencesCount;
+  };
+  static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceFeatures2KHR
+  {
+    PhysicalDeviceFeatures2KHR( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() )
+      : sType( StructureType::ePhysicalDeviceFeatures2KHR )
+      , pNext( nullptr )
+      , features( features_ )
+    {
+    }
+
+    PhysicalDeviceFeatures2KHR( VkPhysicalDeviceFeatures2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2KHR ) );
+    }
+
+    PhysicalDeviceFeatures2KHR& operator=( VkPhysicalDeviceFeatures2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2KHR ) );
+      return *this;
+    }
+    PhysicalDeviceFeatures2KHR& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2KHR& setFeatures( PhysicalDeviceFeatures features_ )
+    {
+      features = features_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceFeatures2KHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures2KHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceFeatures2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( features == rhs.features );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    PhysicalDeviceFeatures features;
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures2KHR ) == sizeof( VkPhysicalDeviceFeatures2KHR ), "struct and wrapper have different size!" );
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 )
+      : sType( StructureType::ePhysicalDevicePushDescriptorPropertiesKHR )
+      , pNext( nullptr )
+      , maxPushDescriptors( maxPushDescriptors_ )
+    {
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) );
+      return *this;
+    }
+    PhysicalDevicePushDescriptorPropertiesKHR& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR& setMaxPushDescriptors( uint32_t maxPushDescriptors_ )
+    {
+      maxPushDescriptors = maxPushDescriptors_;
+      return *this;
+    }
+
+    operator const VkPhysicalDevicePushDescriptorPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>(this);
+    }
+
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPushDescriptors == rhs.maxPushDescriptors );
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t maxPushDescriptors;
+  };
+  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct PresentRegionsKHR
+  {
+    PresentRegionsKHR( uint32_t swapchainCount_ = 0, const PresentRegionKHR* pRegions_ = nullptr )
+      : sType( StructureType::ePresentRegionsKHR )
+      , pNext( nullptr )
+      , swapchainCount( swapchainCount_ )
+      , pRegions( pRegions_ )
+    {
+    }
+
+    PresentRegionsKHR( VkPresentRegionsKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentRegionsKHR ) );
+    }
+
+    PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentRegionsKHR ) );
+      return *this;
+    }
+    PresentRegionsKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentRegionsKHR& setSwapchainCount( uint32_t swapchainCount_ )
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentRegionsKHR& setPRegions( const PresentRegionKHR* pRegions_ )
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+    operator const VkPresentRegionsKHR&() const
+    {
+      return *reinterpret_cast<const VkPresentRegionsKHR*>(this);
+    }
+
+    bool operator==( PresentRegionsKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( PresentRegionsKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t swapchainCount;
+    const PresentRegionKHR* pRegions;
+  };
+  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
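+
+  // Sketch of typical use (assuming a PresentInfoKHR 'presentInfo' and a
+  // RectLayerKHR 'rect' prepared elsewhere): PresentRegionsKHR is chained into the
+  // present info so that only the listed rectangles need to be updated:
+  //
+  //   PresentRegionKHR region( 1, &rect );          // one dirty rectangle
+  //   PresentRegionsKHR regions( 1, &region );      // one entry per swapchain
+  //   presentInfo.setPNext( &regions );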
+
+  struct PhysicalDeviceVariablePointerFeaturesKHR
+  {
+    PhysicalDeviceVariablePointerFeaturesKHR( Bool32 variablePointersStorageBuffer_ = 0, Bool32 variablePointers_ = 0 )
+      : sType( StructureType::ePhysicalDeviceVariablePointerFeaturesKHR )
+      , pNext( nullptr )
+      , variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+      , variablePointers( variablePointers_ )
+    {
+    }
+
+    PhysicalDeviceVariablePointerFeaturesKHR( VkPhysicalDeviceVariablePointerFeaturesKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeaturesKHR ) );
+    }
+
+    PhysicalDeviceVariablePointerFeaturesKHR& operator=( VkPhysicalDeviceVariablePointerFeaturesKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeaturesKHR ) );
+      return *this;
+    }
+    PhysicalDeviceVariablePointerFeaturesKHR& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointerFeaturesKHR& setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ )
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointerFeaturesKHR& setVariablePointers( Bool32 variablePointers_ )
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceVariablePointerFeaturesKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVariablePointerFeaturesKHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceVariablePointerFeaturesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+          && ( variablePointers == rhs.variablePointers );
+    }
+
+    bool operator!=( PhysicalDeviceVariablePointerFeaturesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 variablePointersStorageBuffer;
+    Bool32 variablePointers;
+  };
+  static_assert( sizeof( PhysicalDeviceVariablePointerFeaturesKHR ) == sizeof( VkPhysicalDeviceVariablePointerFeaturesKHR ), "struct and wrapper have different size!" );
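+
+  // Feature structs such as the one above are chained into PhysicalDeviceFeatures2KHR
+  // and filled in by the implementation. A minimal sketch, assuming the
+  // VK_KHR_get_physical_device_properties2 extension is enabled:
+  //
+  //   PhysicalDeviceVariablePointerFeaturesKHR variablePointerFeatures;
+  //   PhysicalDeviceFeatures2KHR features2;
+  //   features2.setPNext( &variablePointerFeatures );
+  //   physicalDevice.getFeatures2KHR( &features2 );  // vkGetPhysicalDeviceFeatures2KHR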
+
+  struct PhysicalDeviceIDPropertiesKHR
+  {
+    operator const VkPhysicalDeviceIDPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIDPropertiesKHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceIDPropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE_KHR * sizeof( uint8_t ) ) == 0 )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid );
+    }
+
+    bool operator!=( PhysicalDeviceIDPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint8_t deviceUUID[VK_UUID_SIZE];
+    uint8_t driverUUID[VK_UUID_SIZE];
+    uint8_t deviceLUID[VK_LUID_SIZE_KHR];
+    uint32_t deviceNodeMask;
+    Bool32 deviceLUIDValid;
+  };
+  static_assert( sizeof( PhysicalDeviceIDPropertiesKHR ) == sizeof( VkPhysicalDeviceIDPropertiesKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportMemoryWin32HandleInfoKHR
+  {
+    ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 )
+      : sType( StructureType::eExportMemoryWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {
+    }
+
+    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
+    }
+
+    ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) );
+      return *this;
+    }
+    ExportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ )
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator const VkExportMemoryWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryWin32HandlePropertiesKHR
+  {
+    operator const VkMemoryWin32HandlePropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>(this);
+    }
+
+    bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct MemoryFdPropertiesKHR
+  {
+    operator const VkMemoryFdPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>(this);
+    }
+
+    bool operator==( MemoryFdPropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryFdPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoKHR
+  {
+    Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeouts_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
+      : sType( StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR )
+      , pNext( nullptr )
+      , acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeouts( pAcquireTimeouts_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) );
+      return *this;
+    }
+    Win32KeyedMutexAcquireReleaseInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setAcquireCount( uint32_t acquireCount_ )
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ )
+    {
+      pAcquireTimeouts = pAcquireTimeouts_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setReleaseCount( uint32_t releaseCount_ )
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+    operator const VkWin32KeyedMutexAcquireReleaseInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>(this);
+    }
+
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t acquireCount;
+    const DeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeouts;
+    uint32_t releaseCount;
+    const DeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportSemaphoreWin32HandleInfoKHR
+  {
+    ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 )
+      : sType( StructureType::eExportSemaphoreWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) );
+      return *this;
+    }
+    ExportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ )
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator const VkExportSemaphoreWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct D3D12FenceSubmitInfoKHR
+  {
+    D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0, const uint64_t* pWaitSemaphoreValues_ = nullptr, uint32_t signalSemaphoreValuesCount_ = 0, const uint64_t* pSignalSemaphoreValues_ = nullptr )
+      : sType( StructureType::eD3D12FenceSubmitInfoKHR )
+      , pNext( nullptr )
+      , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+      , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
+      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {
+    }
+
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
+    }
+
+    D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) );
+      return *this;
+    }
+    D3D12FenceSubmitInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR& setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ )
+    {
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR& setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ )
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR& setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ )
+    {
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR& setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ )
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+    operator const VkD3D12FenceSubmitInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>(this);
+    }
+
+    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t waitSemaphoreValuesCount;
+    const uint64_t* pWaitSemaphoreValues;
+    uint32_t signalSemaphoreValuesCount;
+    const uint64_t* pSignalSemaphoreValues;
+  };
+  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportFenceWin32HandleInfoKHR
+  {
+    ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 )
+      : sType( StructureType::eExportFenceWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {
+    }
+
+    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
+    }
+
+    ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) );
+      return *this;
+    }
+    ExportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ )
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ )
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator const VkExportFenceWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct PhysicalDeviceMultiviewFeaturesKHX
+  {
+    PhysicalDeviceMultiviewFeaturesKHX( Bool32 multiview_ = 0, Bool32 multiviewGeometryShader_ = 0, Bool32 multiviewTessellationShader_ = 0 )
+      : sType( StructureType::ePhysicalDeviceMultiviewFeaturesKHX )
+      , pNext( nullptr )
+      , multiview( multiview_ )
+      , multiviewGeometryShader( multiviewGeometryShader_ )
+      , multiviewTessellationShader( multiviewTessellationShader_ )
+    {
+    }
+
+    PhysicalDeviceMultiviewFeaturesKHX( VkPhysicalDeviceMultiviewFeaturesKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeaturesKHX ) );
+    }
+
+    PhysicalDeviceMultiviewFeaturesKHX& operator=( VkPhysicalDeviceMultiviewFeaturesKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeaturesKHX ) );
+      return *this;
+    }
+    PhysicalDeviceMultiviewFeaturesKHX& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeaturesKHX& setMultiview( Bool32 multiview_ )
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeaturesKHX& setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ )
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeaturesKHX& setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ )
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceMultiviewFeaturesKHX&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeaturesKHX*>(this);
+    }
+
+    bool operator==( PhysicalDeviceMultiviewFeaturesKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewFeaturesKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 multiview;
+    Bool32 multiviewGeometryShader;
+    Bool32 multiviewTessellationShader;
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewFeaturesKHX ) == sizeof( VkPhysicalDeviceMultiviewFeaturesKHX ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceMultiviewPropertiesKHX
+  {
+    operator const VkPhysicalDeviceMultiviewPropertiesKHX&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPropertiesKHX*>(this);
+    }
+
+    bool operator==( PhysicalDeviceMultiviewPropertiesKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPropertiesKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t maxMultiviewViewCount;
+    uint32_t maxMultiviewInstanceIndex;
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewPropertiesKHX ) == sizeof( VkPhysicalDeviceMultiviewPropertiesKHX ), "struct and wrapper have different size!" );
+
+  struct RenderPassMultiviewCreateInfoKHX
+  {
+    RenderPassMultiviewCreateInfoKHX( uint32_t subpassCount_ = 0, const uint32_t* pViewMasks_ = nullptr, uint32_t dependencyCount_ = 0, const int32_t* pViewOffsets_ = nullptr, uint32_t correlationMaskCount_ = 0, const uint32_t* pCorrelationMasks_ = nullptr )
+      : sType( StructureType::eRenderPassMultiviewCreateInfoKHX )
+      , pNext( nullptr )
+      , subpassCount( subpassCount_ )
+      , pViewMasks( pViewMasks_ )
+      , dependencyCount( dependencyCount_ )
+      , pViewOffsets( pViewOffsets_ )
+      , correlationMaskCount( correlationMaskCount_ )
+      , pCorrelationMasks( pCorrelationMasks_ )
+    {
+    }
+
+    RenderPassMultiviewCreateInfoKHX( VkRenderPassMultiviewCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfoKHX ) );
+    }
+
+    RenderPassMultiviewCreateInfoKHX& operator=( VkRenderPassMultiviewCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfoKHX ) );
+      return *this;
+    }
+    RenderPassMultiviewCreateInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfoKHX& setSubpassCount( uint32_t subpassCount_ )
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfoKHX& setPViewMasks( const uint32_t* pViewMasks_ )
+    {
+      pViewMasks = pViewMasks_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfoKHX& setDependencyCount( uint32_t dependencyCount_ )
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfoKHX& setPViewOffsets( const int32_t* pViewOffsets_ )
+    {
+      pViewOffsets = pViewOffsets_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfoKHX& setCorrelationMaskCount( uint32_t correlationMaskCount_ )
+    {
+      correlationMaskCount = correlationMaskCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfoKHX& setPCorrelationMasks( const uint32_t* pCorrelationMasks_ )
+    {
+      pCorrelationMasks = pCorrelationMasks_;
+      return *this;
+    }
+
+    operator const VkRenderPassMultiviewCreateInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfoKHX*>(this);
+    }
+
+    bool operator==( RenderPassMultiviewCreateInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pViewMasks == rhs.pViewMasks )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pViewOffsets == rhs.pViewOffsets )
+          && ( correlationMaskCount == rhs.correlationMaskCount )
+          && ( pCorrelationMasks == rhs.pCorrelationMasks );
+    }
+
+    bool operator!=( RenderPassMultiviewCreateInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t subpassCount;
+    const uint32_t* pViewMasks;
+    uint32_t dependencyCount;
+    const int32_t* pViewOffsets;
+    uint32_t correlationMaskCount;
+    const uint32_t* pCorrelationMasks;
+  };
+  static_assert( sizeof( RenderPassMultiviewCreateInfoKHX ) == sizeof( VkRenderPassMultiviewCreateInfoKHX ), "struct and wrapper have different size!" );
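+
+  // For VK_KHX_multiview, each element of pViewMasks selects the views rendered by
+  // the corresponding subpass (bit i enables view i). A single-subpass render pass
+  // covering two views, assuming a RenderPassCreateInfo 'renderPassInfo', could be
+  // set up as:
+  //
+  //   const uint32_t viewMask = 0x3;                // views 0 and 1
+  //   RenderPassMultiviewCreateInfoKHX multiviewInfo;
+  //   multiviewInfo.setSubpassCount( 1 ).setPViewMasks( &viewMask );
+  //   renderPassInfo.setPNext( &multiviewInfo );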
+
+  struct BindBufferMemoryInfoKHX
+  {
+    BindBufferMemoryInfoKHX( Buffer buffer_ = Buffer(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, uint32_t deviceIndexCount_ = 0, const uint32_t* pDeviceIndices_ = nullptr )
+      : sType( StructureType::eBindBufferMemoryInfoKHX )
+      , pNext( nullptr )
+      , buffer( buffer_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+    {
+    }
+
+    BindBufferMemoryInfoKHX( VkBindBufferMemoryInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindBufferMemoryInfoKHX ) );
+    }
+
+    BindBufferMemoryInfoKHX& operator=( VkBindBufferMemoryInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindBufferMemoryInfoKHX ) );
+      return *this;
+    }
+    BindBufferMemoryInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindBufferMemoryInfoKHX& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BindBufferMemoryInfoKHX& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindBufferMemoryInfoKHX& setMemoryOffset( DeviceSize memoryOffset_ )
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    BindBufferMemoryInfoKHX& setDeviceIndexCount( uint32_t deviceIndexCount_ )
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindBufferMemoryInfoKHX& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+    operator const VkBindBufferMemoryInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryInfoKHX*>(this);
+    }
+
+    bool operator==( BindBufferMemoryInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
+    }
+
+    bool operator!=( BindBufferMemoryInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Buffer buffer;
+    DeviceMemory memory;
+    DeviceSize memoryOffset;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+  };
+  static_assert( sizeof( BindBufferMemoryInfoKHX ) == sizeof( VkBindBufferMemoryInfoKHX ), "struct and wrapper have different size!" );
+
+  struct BindImageMemoryInfoKHX
+  {
+    BindImageMemoryInfoKHX( Image image_ = Image(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, uint32_t deviceIndexCount_ = 0, const uint32_t* pDeviceIndices_ = nullptr, uint32_t SFRRectCount_ = 0, const Rect2D* pSFRRects_ = nullptr )
+      : sType( StructureType::eBindImageMemoryInfoKHX )
+      , pNext( nullptr )
+      , image( image_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+      , SFRRectCount( SFRRectCount_ )
+      , pSFRRects( pSFRRects_ )
+    {
+    }
+
+    BindImageMemoryInfoKHX( VkBindImageMemoryInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindImageMemoryInfoKHX ) );
+    }
+
+    BindImageMemoryInfoKHX& operator=( VkBindImageMemoryInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindImageMemoryInfoKHX ) );
+      return *this;
+    }
+    BindImageMemoryInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setMemoryOffset( DeviceSize memoryOffset_ )
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setDeviceIndexCount( uint32_t deviceIndexCount_ )
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setPDeviceIndices( const uint32_t* pDeviceIndices_ )
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setSFRRectCount( uint32_t SFRRectCount_ )
+    {
+      SFRRectCount = SFRRectCount_;
+      return *this;
+    }
+
+    BindImageMemoryInfoKHX& setPSFRRects( const Rect2D* pSFRRects_ )
+    {
+      pSFRRects = pSFRRects_;
+      return *this;
+    }
+
+    operator const VkBindImageMemoryInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkBindImageMemoryInfoKHX*>(this);
+    }
+
+    bool operator==( BindImageMemoryInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices )
+          && ( SFRRectCount == rhs.SFRRectCount )
+          && ( pSFRRects == rhs.pSFRRects );
+    }
+
+    bool operator!=( BindImageMemoryInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Image image;
+    DeviceMemory memory;
+    DeviceSize memoryOffset;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+    uint32_t SFRRectCount;
+    const Rect2D* pSFRRects;
+  };
+  static_assert( sizeof( BindImageMemoryInfoKHX ) == sizeof( VkBindImageMemoryInfoKHX ), "struct and wrapper have different size!" );
+
+  struct DeviceGroupRenderPassBeginInfoKHX
+  {
+    DeviceGroupRenderPassBeginInfoKHX( uint32_t deviceMask_ = 0, uint32_t deviceRenderAreaCount_ = 0, const Rect2D* pDeviceRenderAreas_ = nullptr )
+      : sType( StructureType::eDeviceGroupRenderPassBeginInfoKHX )
+      , pNext( nullptr )
+      , deviceMask( deviceMask_ )
+      , deviceRenderAreaCount( deviceRenderAreaCount_ )
+      , pDeviceRenderAreas( pDeviceRenderAreas_ )
+    {
+    }
+
+    DeviceGroupRenderPassBeginInfoKHX( VkDeviceGroupRenderPassBeginInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfoKHX ) );
+    }
+
+    DeviceGroupRenderPassBeginInfoKHX& operator=( VkDeviceGroupRenderPassBeginInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupRenderPassBeginInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfoKHX& setDeviceMask( uint32_t deviceMask_ )
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfoKHX& setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ )
+    {
+      deviceRenderAreaCount = deviceRenderAreaCount_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfoKHX& setPDeviceRenderAreas( const Rect2D* pDeviceRenderAreas_ )
+    {
+      pDeviceRenderAreas = pDeviceRenderAreas_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupRenderPassBeginInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupRenderPassBeginInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask )
+          && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
+          && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+    }
+
+    bool operator!=( DeviceGroupRenderPassBeginInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t deviceMask;
+    uint32_t deviceRenderAreaCount;
+    const Rect2D* pDeviceRenderAreas;
+  };
+  static_assert( sizeof( DeviceGroupRenderPassBeginInfoKHX ) == sizeof( VkDeviceGroupRenderPassBeginInfoKHX ), "struct and wrapper have different size!" );
+
+  struct DeviceGroupCommandBufferBeginInfoKHX
+  {
+    DeviceGroupCommandBufferBeginInfoKHX( uint32_t deviceMask_ = 0 )
+      : sType( StructureType::eDeviceGroupCommandBufferBeginInfoKHX )
+      , pNext( nullptr )
+      , deviceMask( deviceMask_ )
+    {
+    }
+
+    DeviceGroupCommandBufferBeginInfoKHX( VkDeviceGroupCommandBufferBeginInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfoKHX ) );
+    }
+
+    DeviceGroupCommandBufferBeginInfoKHX& operator=( VkDeviceGroupCommandBufferBeginInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupCommandBufferBeginInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfoKHX& setDeviceMask( uint32_t deviceMask_ )
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupCommandBufferBeginInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupCommandBufferBeginInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( DeviceGroupCommandBufferBeginInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t deviceMask;
+  };
+  static_assert( sizeof( DeviceGroupCommandBufferBeginInfoKHX ) == sizeof( VkDeviceGroupCommandBufferBeginInfoKHX ), "struct and wrapper have different size!" );
+
+  struct DeviceGroupSubmitInfoKHX
+  {
+    DeviceGroupSubmitInfoKHX( uint32_t waitSemaphoreCount_ = 0, const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr, uint32_t commandBufferCount_ = 0, const uint32_t* pCommandBufferDeviceMasks_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr )
+      : sType( StructureType::eDeviceGroupSubmitInfoKHX )
+      , pNext( nullptr )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+    {
+    }
+
+    DeviceGroupSubmitInfoKHX( VkDeviceGroupSubmitInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfoKHX ) );
+    }
+
+    DeviceGroupSubmitInfoKHX& operator=( VkDeviceGroupSubmitInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupSubmitInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfoKHX& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfoKHX& setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ )
+    {
+      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfoKHX& setCommandBufferCount( uint32_t commandBufferCount_ )
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfoKHX& setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ )
+    {
+      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfoKHX& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfoKHX& setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ )
+    {
+      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupSubmitInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupSubmitInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupSubmitInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+    }
+
+    bool operator!=( DeviceGroupSubmitInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    const uint32_t* pWaitSemaphoreDeviceIndices;
+    uint32_t commandBufferCount;
+    const uint32_t* pCommandBufferDeviceMasks;
+    uint32_t signalSemaphoreCount;
+    const uint32_t* pSignalSemaphoreDeviceIndices;
+  };
+  static_assert( sizeof( DeviceGroupSubmitInfoKHX ) == sizeof( VkDeviceGroupSubmitInfoKHX ), "struct and wrapper have different size!" );
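+
+  // Illustrative usage sketch, not part of the generated header: a device-group submission that
+  // runs a single command buffer on device 0 only; the mask value is a placeholder.
+  //
+  //   const uint32_t cmdBufDeviceMasks[] = { 0x1 };
+  //   vk::DeviceGroupSubmitInfoKHX groupSubmit = vk::DeviceGroupSubmitInfoKHX()
+  //     .setCommandBufferCount( 1 )
+  //     .setPCommandBufferDeviceMasks( cmdBufDeviceMasks );
+  //   // chained via SubmitInfo::setPNext( &groupSubmit ) before queue submission.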
+
+  struct DeviceGroupBindSparseInfoKHX
+  {
+    DeviceGroupBindSparseInfoKHX( uint32_t resourceDeviceIndex_ = 0, uint32_t memoryDeviceIndex_ = 0 )
+      : sType( StructureType::eDeviceGroupBindSparseInfoKHX )
+      , pNext( nullptr )
+      , resourceDeviceIndex( resourceDeviceIndex_ )
+      , memoryDeviceIndex( memoryDeviceIndex_ )
+    {
+    }
+
+    DeviceGroupBindSparseInfoKHX( VkDeviceGroupBindSparseInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfoKHX ) );
+    }
+
+    DeviceGroupBindSparseInfoKHX& operator=( VkDeviceGroupBindSparseInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupBindSparseInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfoKHX& setResourceDeviceIndex( uint32_t resourceDeviceIndex_ )
+    {
+      resourceDeviceIndex = resourceDeviceIndex_;
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfoKHX& setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ )
+    {
+      memoryDeviceIndex = memoryDeviceIndex_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupBindSparseInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupBindSparseInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupBindSparseInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+    }
+
+    bool operator!=( DeviceGroupBindSparseInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t resourceDeviceIndex;
+    uint32_t memoryDeviceIndex;
+  };
+  static_assert( sizeof( DeviceGroupBindSparseInfoKHX ) == sizeof( VkDeviceGroupBindSparseInfoKHX ), "struct and wrapper have different size!" );
+
+  struct ImageSwapchainCreateInfoKHX
+  {
+    ImageSwapchainCreateInfoKHX( SwapchainKHR swapchain_ = SwapchainKHR() )
+      : sType( StructureType::eImageSwapchainCreateInfoKHX )
+      , pNext( nullptr )
+      , swapchain( swapchain_ )
+    {
+    }
+
+    ImageSwapchainCreateInfoKHX( VkImageSwapchainCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHX ) );
+    }
+
+    ImageSwapchainCreateInfoKHX& operator=( VkImageSwapchainCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHX ) );
+      return *this;
+    }
+    ImageSwapchainCreateInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHX& setSwapchain( SwapchainKHR swapchain_ )
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    operator const VkImageSwapchainCreateInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHX*>(this);
+    }
+
+    bool operator==( ImageSwapchainCreateInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain );
+    }
+
+    bool operator!=( ImageSwapchainCreateInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SwapchainKHR swapchain;
+  };
+  static_assert( sizeof( ImageSwapchainCreateInfoKHX ) == sizeof( VkImageSwapchainCreateInfoKHX ), "struct and wrapper have different size!" );
+
+  struct BindImageMemorySwapchainInfoKHX
+  {
+    BindImageMemorySwapchainInfoKHX( SwapchainKHR swapchain_ = SwapchainKHR(), uint32_t imageIndex_ = 0 )
+      : sType( StructureType::eBindImageMemorySwapchainInfoKHX )
+      , pNext( nullptr )
+      , swapchain( swapchain_ )
+      , imageIndex( imageIndex_ )
+    {
+    }
+
+    BindImageMemorySwapchainInfoKHX( VkBindImageMemorySwapchainInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHX ) );
+    }
+
+    BindImageMemorySwapchainInfoKHX& operator=( VkBindImageMemorySwapchainInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHX ) );
+      return *this;
+    }
+    BindImageMemorySwapchainInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHX& setSwapchain( SwapchainKHR swapchain_ )
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHX& setImageIndex( uint32_t imageIndex_ )
+    {
+      imageIndex = imageIndex_;
+      return *this;
+    }
+
+    operator const VkBindImageMemorySwapchainInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHX*>(this);
+    }
+
+    bool operator==( BindImageMemorySwapchainInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( imageIndex == rhs.imageIndex );
+    }
+
+    bool operator!=( BindImageMemorySwapchainInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SwapchainKHR swapchain;
+    uint32_t imageIndex;
+  };
+  static_assert( sizeof( BindImageMemorySwapchainInfoKHX ) == sizeof( VkBindImageMemorySwapchainInfoKHX ), "struct and wrapper have different size!" );
+
+  struct AcquireNextImageInfoKHX
+  {
+    AcquireNextImageInfoKHX( SwapchainKHR swapchain_ = SwapchainKHR(), uint64_t timeout_ = 0, Semaphore semaphore_ = Semaphore(), Fence fence_ = Fence(), uint32_t deviceMask_ = 0 )
+      : sType( StructureType::eAcquireNextImageInfoKHX )
+      , pNext( nullptr )
+      , swapchain( swapchain_ )
+      , timeout( timeout_ )
+      , semaphore( semaphore_ )
+      , fence( fence_ )
+      , deviceMask( deviceMask_ )
+    {
+    }
+
+    AcquireNextImageInfoKHX( VkAcquireNextImageInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHX ) );
+    }
+
+    AcquireNextImageInfoKHX& operator=( VkAcquireNextImageInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHX ) );
+      return *this;
+    }
+    AcquireNextImageInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHX& setSwapchain( SwapchainKHR swapchain_ )
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHX& setTimeout( uint64_t timeout_ )
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHX& setSemaphore( Semaphore semaphore_ )
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHX& setFence( Fence fence_ )
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHX& setDeviceMask( uint32_t deviceMask_ )
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator const VkAcquireNextImageInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkAcquireNextImageInfoKHX*>(this);
+    }
+
+    bool operator==( AcquireNextImageInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( timeout == rhs.timeout )
+          && ( semaphore == rhs.semaphore )
+          && ( fence == rhs.fence )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( AcquireNextImageInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SwapchainKHR swapchain;
+    uint64_t timeout;
+    Semaphore semaphore;
+    Fence fence;
+    uint32_t deviceMask;
+  };
+  static_assert( sizeof( AcquireNextImageInfoKHX ) == sizeof( VkAcquireNextImageInfoKHX ), "struct and wrapper have different size!" );
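+
+  // Illustrative usage sketch, not part of the generated header: filling the acquire info for a
+  // single-device mask. The swapchain and semaphore handles are placeholders, and the struct is
+  // intended for the VK_KHX_device_group image-acquire entry point.
+  //
+  //   vk::AcquireNextImageInfoKHX acquireInfo = vk::AcquireNextImageInfoKHX()
+  //     .setSwapchain( swapchain )
+  //     .setTimeout( UINT64_MAX )
+  //     .setSemaphore( acquireSemaphore )
+  //     .setDeviceMask( 0x1 );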
+
+  struct HdrMetadataEXT
+  {
+    HdrMetadataEXT( XYColorEXT displayPrimaryRed_ = XYColorEXT(), XYColorEXT displayPrimaryGreen_ = XYColorEXT(), XYColorEXT displayPrimaryBlue_ = XYColorEXT(), XYColorEXT whitePoint_ = XYColorEXT(), float maxLuminance_ = 0, float minLuminance_ = 0, float maxContentLightLevel_ = 0, float maxFrameAverageLightLevel_ = 0 )
+      : sType( StructureType::eHdrMetadataEXT )
+      , pNext( nullptr )
+      , displayPrimaryRed( displayPrimaryRed_ )
+      , displayPrimaryGreen( displayPrimaryGreen_ )
+      , displayPrimaryBlue( displayPrimaryBlue_ )
+      , whitePoint( whitePoint_ )
+      , maxLuminance( maxLuminance_ )
+      , minLuminance( minLuminance_ )
+      , maxContentLightLevel( maxContentLightLevel_ )
+      , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+    {
+    }
+
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( HdrMetadataEXT ) );
+    }
+
+    HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( HdrMetadataEXT ) );
+      return *this;
+    }
+    HdrMetadataEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setDisplayPrimaryRed( XYColorEXT displayPrimaryRed_ )
+    {
+      displayPrimaryRed = displayPrimaryRed_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setDisplayPrimaryGreen( XYColorEXT displayPrimaryGreen_ )
+    {
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setDisplayPrimaryBlue( XYColorEXT displayPrimaryBlue_ )
+    {
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setWhitePoint( XYColorEXT whitePoint_ )
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setMaxLuminance( float maxLuminance_ )
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setMinLuminance( float minLuminance_ )
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setMaxContentLightLevel( float maxContentLightLevel_ )
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    HdrMetadataEXT& setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ )
+    {
+      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+      return *this;
+    }
+
+    operator const VkHdrMetadataEXT&() const
+    {
+      return *reinterpret_cast<const VkHdrMetadataEXT*>(this);
+    }
+
+    bool operator==( HdrMetadataEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPrimaryRed == rhs.displayPrimaryRed )
+          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
+          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
+          && ( whitePoint == rhs.whitePoint )
+          && ( maxLuminance == rhs.maxLuminance )
+          && ( minLuminance == rhs.minLuminance )
+          && ( maxContentLightLevel == rhs.maxContentLightLevel )
+          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
+    }
+
+    bool operator!=( HdrMetadataEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    XYColorEXT displayPrimaryRed;
+    XYColorEXT displayPrimaryGreen;
+    XYColorEXT displayPrimaryBlue;
+    XYColorEXT whitePoint;
+    float maxLuminance;
+    float minLuminance;
+    float maxContentLightLevel;
+    float maxFrameAverageLightLevel;
+  };
+  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
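+
+  // Illustrative usage sketch, not part of the generated header: the values below are example
+  // BT.2020 primaries and example luminance limits for an HDR10 swapchain; real metadata comes
+  // from the content's mastering display.
+  //
+  //   vk::HdrMetadataEXT hdrMetadata = vk::HdrMetadataEXT()
+  //     .setDisplayPrimaryRed( vk::XYColorEXT( 0.708f, 0.292f ) )
+  //     .setDisplayPrimaryGreen( vk::XYColorEXT( 0.170f, 0.797f ) )
+  //     .setDisplayPrimaryBlue( vk::XYColorEXT( 0.131f, 0.046f ) )
+  //     .setWhitePoint( vk::XYColorEXT( 0.3127f, 0.3290f ) )
+  //     .setMaxLuminance( 1000.0f )
+  //     .setMinLuminance( 0.001f )
+  //     .setMaxContentLightLevel( 1000.0f )
+  //     .setMaxFrameAverageLightLevel( 400.0f );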
+
+  struct PresentTimesInfoGOOGLE
+  {
+    PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0, const PresentTimeGOOGLE* pTimes_ = nullptr )
+      : sType( StructureType::ePresentTimesInfoGOOGLE )
+      , pNext( nullptr )
+      , swapchainCount( swapchainCount_ )
+      , pTimes( pTimes_ )
+    {
+    }
+
+    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) );
+    }
+
+    PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) );
+      return *this;
+    }
+    PresentTimesInfoGOOGLE& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE& setSwapchainCount( uint32_t swapchainCount_ )
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE& setPTimes( const PresentTimeGOOGLE* pTimes_ )
+    {
+      pTimes = pTimes_;
+      return *this;
+    }
+
+    operator const VkPresentTimesInfoGOOGLE&() const
+    {
+      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>(this);
+    }
+
+    bool operator==( PresentTimesInfoGOOGLE const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pTimes == rhs.pTimes );
+    }
+
+    bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t swapchainCount;
+    const PresentTimeGOOGLE* pTimes;
+  };
+  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  struct IOSSurfaceCreateInfoMVK
+  {
+    IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = IOSSurfaceCreateFlagsMVK(), const void* pView_ = nullptr )
+      : sType( StructureType::eIOSSurfaceCreateInfoMVK )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pView( pView_ )
+    {
+    }
+
+    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
+    }
+
+    IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) );
+      return *this;
+    }
+    IOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK& setFlags( IOSSurfaceCreateFlagsMVK flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK& setPView( const void* pView_ )
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator const VkIOSSurfaceCreateInfoMVK&() const
+    {
+      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>(this);
+    }
+
+    bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    IOSSurfaceCreateFlagsMVK flags;
+    const void* pView;
+  };
+  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  struct MacOSSurfaceCreateInfoMVK
+  {
+    MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = MacOSSurfaceCreateFlagsMVK(), const void* pView_ = nullptr )
+      : sType( StructureType::eMacOSSurfaceCreateInfoMVK )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pView( pView_ )
+    {
+    }
+
+    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
+    }
+
+    MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) );
+      return *this;
+    }
+    MacOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK& setFlags( MacOSSurfaceCreateFlagsMVK flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK& setPView( const void* pView_ )
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator const VkMacOSSurfaceCreateInfoMVK&() const
+    {
+      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>(this);
+    }
+
+    bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    MacOSSurfaceCreateFlagsMVK flags;
+    const void* pView;
+  };
+  static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  struct PipelineViewportWScalingStateCreateInfoNV
+  {
+    PipelineViewportWScalingStateCreateInfoNV( Bool32 viewportWScalingEnable_ = 0, uint32_t viewportCount_ = 0, const ViewportWScalingNV* pViewportWScalings_ = nullptr )
+      : sType( StructureType::ePipelineViewportWScalingStateCreateInfoNV )
+      , pNext( nullptr )
+      , viewportWScalingEnable( viewportWScalingEnable_ )
+      , viewportCount( viewportCount_ )
+      , pViewportWScalings( pViewportWScalings_ )
+    {
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) );
+      return *this;
+    }
+    PipelineViewportWScalingStateCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV& setViewportWScalingEnable( Bool32 viewportWScalingEnable_ )
+    {
+      viewportWScalingEnable = viewportWScalingEnable_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV& setPViewportWScalings( const ViewportWScalingNV* pViewportWScalings_ )
+    {
+      pViewportWScalings = pViewportWScalings_;
+      return *this;
+    }
+
+    operator const VkPipelineViewportWScalingStateCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>(this);
+    }
+
+    bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportWScalings == rhs.pViewportWScalings );
+    }
+
+    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Bool32 viewportWScalingEnable;
+    uint32_t viewportCount;
+    const ViewportWScalingNV* pViewportWScalings;
+  };
+  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT
+  {
+    PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 )
+      : sType( StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT )
+      , pNext( nullptr )
+      , maxDiscardRectangles( maxDiscardRectangles_ )
+    {
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) );
+      return *this;
+    }
+    PhysicalDeviceDiscardRectanglePropertiesEXT& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT& setMaxDiscardRectangles( uint32_t maxDiscardRectangles_ )
+    {
+      maxDiscardRectangles = maxDiscardRectangles_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceDiscardRectanglePropertiesEXT&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>(this);
+    }
+
+    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+    }
+
+    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t maxDiscardRectangles;
+  };
+  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  {
+    operator const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>(this);
+    }
+
+    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 perViewPositionAllComponents;
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceSurfaceInfo2KHR
+  {
+    PhysicalDeviceSurfaceInfo2KHR( SurfaceKHR surface_ = SurfaceKHR() )
+      : sType( StructureType::ePhysicalDeviceSurfaceInfo2KHR )
+      , pNext( nullptr )
+      , surface( surface_ )
+    {
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) );
+      return *this;
+    }
+    PhysicalDeviceSurfaceInfo2KHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR& setSurface( SurfaceKHR surface_ )
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceSurfaceInfo2KHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SurfaceKHR surface;
+  };
+  static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+
+  struct PhysicalDevice16BitStorageFeaturesKHR
+  {
+    PhysicalDevice16BitStorageFeaturesKHR( Bool32 storageBuffer16BitAccess_ = 0, Bool32 uniformAndStorageBuffer16BitAccess_ = 0, Bool32 storagePushConstant16_ = 0, Bool32 storageInputOutput16_ = 0 )
+      : sType( StructureType::ePhysicalDevice16BitStorageFeaturesKHR )
+      , pNext( nullptr )
+      , storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+      , storagePushConstant16( storagePushConstant16_ )
+      , storageInputOutput16( storageInputOutput16_ )
+    {
+    }
+
+    PhysicalDevice16BitStorageFeaturesKHR( VkPhysicalDevice16BitStorageFeaturesKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeaturesKHR ) );
+    }
+
+    PhysicalDevice16BitStorageFeaturesKHR& operator=( VkPhysicalDevice16BitStorageFeaturesKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeaturesKHR ) );
+      return *this;
+    }
+    PhysicalDevice16BitStorageFeaturesKHR& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeaturesKHR& setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ )
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeaturesKHR& setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ )
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeaturesKHR& setStoragePushConstant16( Bool32 storagePushConstant16_ )
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeaturesKHR& setStorageInputOutput16( Bool32 storageInputOutput16_ )
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    operator const VkPhysicalDevice16BitStorageFeaturesKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeaturesKHR*>(this);
+    }
+
+    bool operator==( PhysicalDevice16BitStorageFeaturesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 );
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeaturesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 storageBuffer16BitAccess;
+    Bool32 uniformAndStorageBuffer16BitAccess;
+    Bool32 storagePushConstant16;
+    Bool32 storageInputOutput16;
+  };
+  static_assert( sizeof( PhysicalDevice16BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice16BitStorageFeaturesKHR ), "struct and wrapper have different size!" );
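+
+  // Illustrative usage sketch, not part of the generated header: feature structs like this one
+  // are chained through pNext, either when querying support or when enabling the feature at
+  // device creation; the deviceCreateInfo variable below is hypothetical.
+  //
+  //   vk::PhysicalDevice16BitStorageFeaturesKHR storage16 = vk::PhysicalDevice16BitStorageFeaturesKHR()
+  //     .setStorageBuffer16BitAccess( VK_TRUE );
+  //   deviceCreateInfo.setPNext( &storage16 );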
+
+  struct BufferMemoryRequirementsInfo2KHR
+  {
+    BufferMemoryRequirementsInfo2KHR( Buffer buffer_ = Buffer() )
+      : sType( StructureType::eBufferMemoryRequirementsInfo2KHR )
+      , pNext( nullptr )
+      , buffer( buffer_ )
+    {
+    }
+
+    BufferMemoryRequirementsInfo2KHR( VkBufferMemoryRequirementsInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2KHR ) );
+    }
+
+    BufferMemoryRequirementsInfo2KHR& operator=( VkBufferMemoryRequirementsInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2KHR ) );
+      return *this;
+    }
+    BufferMemoryRequirementsInfo2KHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2KHR& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator const VkBufferMemoryRequirementsInfo2KHR&() const
+    {
+      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2KHR*>(this);
+    }
+
+    bool operator==( BufferMemoryRequirementsInfo2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferMemoryRequirementsInfo2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Buffer buffer;
+  };
+  static_assert( sizeof( BufferMemoryRequirementsInfo2KHR ) == sizeof( VkBufferMemoryRequirementsInfo2KHR ), "struct and wrapper have different size!" );
+
+  struct ImageMemoryRequirementsInfo2KHR
+  {
+    ImageMemoryRequirementsInfo2KHR( Image image_ = Image() )
+      : sType( StructureType::eImageMemoryRequirementsInfo2KHR )
+      , pNext( nullptr )
+      , image( image_ )
+    {
+    }
+
+    ImageMemoryRequirementsInfo2KHR( VkImageMemoryRequirementsInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2KHR ) );
+    }
+
+    ImageMemoryRequirementsInfo2KHR& operator=( VkImageMemoryRequirementsInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2KHR ) );
+      return *this;
+    }
+    ImageMemoryRequirementsInfo2KHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2KHR& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator const VkImageMemoryRequirementsInfo2KHR&() const
+    {
+      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2KHR*>(this);
+    }
+
+    bool operator==( ImageMemoryRequirementsInfo2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageMemoryRequirementsInfo2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Image image;
+  };
+  static_assert( sizeof( ImageMemoryRequirementsInfo2KHR ) == sizeof( VkImageMemoryRequirementsInfo2KHR ), "struct and wrapper have different size!" );
+
+  struct ImageSparseMemoryRequirementsInfo2KHR
+  {
+    ImageSparseMemoryRequirementsInfo2KHR( Image image_ = Image() )
+      : sType( StructureType::eImageSparseMemoryRequirementsInfo2KHR )
+      , pNext( nullptr )
+      , image( image_ )
+    {
+    }
+
+    ImageSparseMemoryRequirementsInfo2KHR( VkImageSparseMemoryRequirementsInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2KHR ) );
+    }
+
+    ImageSparseMemoryRequirementsInfo2KHR& operator=( VkImageSparseMemoryRequirementsInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2KHR ) );
+      return *this;
+    }
+    ImageSparseMemoryRequirementsInfo2KHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2KHR& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator const VkImageSparseMemoryRequirementsInfo2KHR&() const
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2KHR*>(this);
+    }
+
+    bool operator==( ImageSparseMemoryRequirementsInfo2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageSparseMemoryRequirementsInfo2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Image image;
+  };
+  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2KHR ) == sizeof( VkImageSparseMemoryRequirementsInfo2KHR ), "struct and wrapper have different size!" );
+
+  struct MemoryRequirements2KHR
+  {
+    operator const VkMemoryRequirements2KHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2KHR*>(this);
+    }
+
+    bool operator==( MemoryRequirements2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    MemoryRequirements memoryRequirements;
+  };
+  static_assert( sizeof( MemoryRequirements2KHR ) == sizeof( VkMemoryRequirements2KHR ), "struct and wrapper have different size!" );
+
+  struct MemoryDedicatedRequirementsKHR
+  {
+    operator const VkMemoryDedicatedRequirementsKHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedRequirementsKHR*>(this);
+    }
+
+    bool operator==( MemoryDedicatedRequirementsKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
+          && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+    }
+
+    bool operator!=( MemoryDedicatedRequirementsKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 prefersDedicatedAllocation;
+    Bool32 requiresDedicatedAllocation;
+  };
+  static_assert( sizeof( MemoryDedicatedRequirementsKHR ) == sizeof( VkMemoryDedicatedRequirementsKHR ), "struct and wrapper have different size!" );
+
+  struct MemoryDedicatedAllocateInfoKHR
+  {
+    MemoryDedicatedAllocateInfoKHR( Image image_ = Image(), Buffer buffer_ = Buffer() )
+      : sType( StructureType::eMemoryDedicatedAllocateInfoKHR )
+      , pNext( nullptr )
+      , image( image_ )
+      , buffer( buffer_ )
+    {
+    }
+
+    MemoryDedicatedAllocateInfoKHR( VkMemoryDedicatedAllocateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfoKHR ) );
+    }
+
+    MemoryDedicatedAllocateInfoKHR& operator=( VkMemoryDedicatedAllocateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfoKHR ) );
+      return *this;
+    }
+    MemoryDedicatedAllocateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfoKHR& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfoKHR& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator const VkMemoryDedicatedAllocateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfoKHR*>(this);
+    }
+
+    bool operator==( MemoryDedicatedAllocateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( MemoryDedicatedAllocateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Image image;
+    Buffer buffer;
+  };
+  static_assert( sizeof( MemoryDedicatedAllocateInfoKHR ) == sizeof( VkMemoryDedicatedAllocateInfoKHR ), "struct and wrapper have different size!" );
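+
+  // Illustrative usage sketch, not part of the generated header: a dedicated allocation for an
+  // image is requested by chaining this struct into the memory allocation info; the image handle
+  // and memoryAllocateInfo variable are placeholders.
+  //
+  //   vk::MemoryDedicatedAllocateInfoKHR dedicatedInfo = vk::MemoryDedicatedAllocateInfoKHR()
+  //     .setImage( image );
+  //   memoryAllocateInfo.setPNext( &dedicatedInfo );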
+
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    operator const VkTextureLODGatherFormatPropertiesAMD&() const
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>(this);
+    }
+
+    bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 supportsTextureGatherLODBiasAMD;
+  };
+  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+
+  struct PipelineCoverageToColorStateCreateInfoNV
+  {
+    PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateFlagsNV flags_ = PipelineCoverageToColorStateCreateFlagsNV(), Bool32 coverageToColorEnable_ = 0, uint32_t coverageToColorLocation_ = 0 )
+      : sType( StructureType::ePipelineCoverageToColorStateCreateInfoNV )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , coverageToColorEnable( coverageToColorEnable_ )
+      , coverageToColorLocation( coverageToColorLocation_ )
+    {
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) );
+      return *this;
+    }
+    PipelineCoverageToColorStateCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV& setFlags( PipelineCoverageToColorStateCreateFlagsNV flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorEnable( Bool32 coverageToColorEnable_ )
+    {
+      coverageToColorEnable = coverageToColorEnable_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorLocation( uint32_t coverageToColorLocation_ )
+    {
+      coverageToColorLocation = coverageToColorLocation_;
+      return *this;
+    }
+
+    operator const VkPipelineCoverageToColorStateCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>(this);
+    }
+
+    bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageToColorEnable == rhs.coverageToColorEnable )
+          && ( coverageToColorLocation == rhs.coverageToColorLocation );
+    }
+
+    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineCoverageToColorStateCreateFlagsNV flags;
+    Bool32 coverageToColorEnable;
+    uint32_t coverageToColorLocation;
+  };
+  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
+  {
+    operator const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 filterMinmaxSingleComponentFormats;
+    Bool32 filterMinmaxImageComponentMapping;
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+  {
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = 0 )
+      : sType( StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT )
+      , pNext( nullptr )
+      , advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+    {
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) );
+      return *this;
+    }
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ )
+    {
+      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>(this);
+    }
+
+    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    Bool32 advancedBlendCoherentOperations;
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+  {
+    operator const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>(this);
+    }
+
+    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
+          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
+          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
+          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
+          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
+          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t advancedBlendMaxColorAttachments;
+    Bool32 advancedBlendIndependentBlend;
+    Bool32 advancedBlendNonPremultipliedSrcColor;
+    Bool32 advancedBlendNonPremultipliedDstColor;
+    Bool32 advancedBlendCorrelatedOverlap;
+    Bool32 advancedBlendAllOperations;
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+
+  enum class SubpassContents
+  {
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
+  };
+
+  struct PresentInfoKHR
+  {
+    PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
+      : sType( StructureType::ePresentInfoKHR )
+      , pNext( nullptr )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , swapchainCount( swapchainCount_ )
+      , pSwapchains( pSwapchains_ )
+      , pImageIndices( pImageIndices_ )
+      , pResults( pResults_ )
+    {
+    }
+
+    PresentInfoKHR( VkPresentInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentInfoKHR ) );
+    }
+
+    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PresentInfoKHR ) );
+      return *this;
+    }
+    PresentInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
+    {
+      pSwapchains = pSwapchains_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+    PresentInfoKHR& setPResults( Result* pResults_ )
+    {
+      pResults = pResults_;
+      return *this;
+    }
+
+    operator const VkPresentInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkPresentInfoKHR*>(this);
+    }
+
+    bool operator==( PresentInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pSwapchains == rhs.pSwapchains )
+          && ( pImageIndices == rhs.pImageIndices )
+          && ( pResults == rhs.pResults );
+    }
+
+    bool operator!=( PresentInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    const Semaphore* pWaitSemaphores;
+    uint32_t swapchainCount;
+    const SwapchainKHR* pSwapchains;
+    const uint32_t* pImageIndices;
+    Result* pResults;
+  };
+  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
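+
+  // Illustrative usage sketch, not part of the generated header: presenting one swapchain image
+  // while waiting on a single semaphore; all handles and the image index are placeholders.
+  //
+  //   vk::PresentInfoKHR presentInfo = vk::PresentInfoKHR()
+  //     .setWaitSemaphoreCount( 1 )
+  //     .setPWaitSemaphores( &renderFinishedSemaphore )
+  //     .setSwapchainCount( 1 )
+  //     .setPSwapchains( &swapchain )
+  //     .setPImageIndices( &imageIndex );
+  //   // handed to the presentation queue afterwards.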
+
+  enum class DynamicState
+  {
+    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+    eScissor = VK_DYNAMIC_STATE_SCISSOR,
+    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT
+  };
+
+  struct PipelineDynamicStateCreateInfo
+  {
+    PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
+      : sType( StructureType::ePipelineDynamicStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , dynamicStateCount( dynamicStateCount_ )
+      , pDynamicStates( pDynamicStates_ )
+    {
+    }
+
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
+    }
+
+    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) );
+      return *this;
+    }
+    PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+    operator const VkPipelineDynamicStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dynamicStateCount == rhs.dynamicStateCount )
+          && ( pDynamicStates == rhs.pDynamicStates );
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineDynamicStateCreateFlags flags;
+    uint32_t dynamicStateCount;
+    const DynamicState* pDynamicStates;
+  };
+  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
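+
+  // Illustrative usage sketch, not part of the generated header: viewport and scissor are marked
+  // dynamic so they can be recorded per command buffer instead of being baked into the pipeline.
+  //
+  //   const vk::DynamicState dynamicStates[] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
+  //   vk::PipelineDynamicStateCreateInfo dynamicState = vk::PipelineDynamicStateCreateInfo()
+  //     .setDynamicStateCount( 2 )
+  //     .setPDynamicStates( dynamicStates );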
+
+  enum class DescriptorUpdateTemplateTypeKHR
+  {
+    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR,
+    ePushDescriptors = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
+  };
+
+  struct DescriptorUpdateTemplateCreateInfoKHR
+  {
+    DescriptorUpdateTemplateCreateInfoKHR( DescriptorUpdateTemplateCreateFlagsKHR flags_ = DescriptorUpdateTemplateCreateFlagsKHR(), uint32_t descriptorUpdateEntryCount_ = 0, const DescriptorUpdateTemplateEntryKHR* pDescriptorUpdateEntries_ = nullptr, DescriptorUpdateTemplateTypeKHR templateType_ = DescriptorUpdateTemplateTypeKHR::eDescriptorSet, DescriptorSetLayout descriptorSetLayout_ = DescriptorSetLayout(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, PipelineLayout pipelineLayout_ = PipelineLayout(), uint32_t set_ = 0 )
+      : sType( StructureType::eDescriptorUpdateTemplateCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
+      , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
+      , templateType( templateType_ )
+      , descriptorSetLayout( descriptorSetLayout_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , pipelineLayout( pipelineLayout_ )
+      , set( set_ )
+    {
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR( VkDescriptorUpdateTemplateCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfoKHR ) );
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& operator=( VkDescriptorUpdateTemplateCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfoKHR ) );
+      return *this;
+    }
+    DescriptorUpdateTemplateCreateInfoKHR& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setFlags( DescriptorUpdateTemplateCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ )
+    {
+      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntryKHR* pDescriptorUpdateEntries_ )
+    {
+      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setTemplateType( DescriptorUpdateTemplateTypeKHR templateType_ )
+    {
+      templateType = templateType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ )
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setPipelineLayout( PipelineLayout pipelineLayout_ )
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfoKHR& setSet( uint32_t set_ )
+    {
+      set = set_;
+      return *this;
+    }
+
+    operator const VkDescriptorUpdateTemplateCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfoKHR*>(this);
+    }
+
+    bool operator==( DescriptorUpdateTemplateCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+          && ( templateType == rhs.templateType )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( set == rhs.set );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    DescriptorUpdateTemplateCreateFlagsKHR flags;
+    uint32_t descriptorUpdateEntryCount;
+    const DescriptorUpdateTemplateEntryKHR* pDescriptorUpdateEntries;
+    DescriptorUpdateTemplateTypeKHR templateType;
+    DescriptorSetLayout descriptorSetLayout;
+    PipelineBindPoint pipelineBindPoint;
+    PipelineLayout pipelineLayout;
+    uint32_t set;
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateCreateInfoKHR ) == sizeof( VkDescriptorUpdateTemplateCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  enum class ObjectType
+  {
+    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+    eInstance = VK_OBJECT_TYPE_INSTANCE,
+    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+    eDevice = VK_OBJECT_TYPE_DEVICE,
+    eQueue = VK_OBJECT_TYPE_QUEUE,
+    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+    eFence = VK_OBJECT_TYPE_FENCE,
+    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+    eBuffer = VK_OBJECT_TYPE_BUFFER,
+    eImage = VK_OBJECT_TYPE_IMAGE,
+    eEvent = VK_OBJECT_TYPE_EVENT,
+    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+    ePipeline = VK_OBJECT_TYPE_PIPELINE,
+    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+    eSampler = VK_OBJECT_TYPE_SAMPLER,
+    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
+    eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX,
+    eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX
+  };
+
+  enum class QueueFlagBits
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
+  };
+
+  using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
+
+  VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
+  {
+    return QueueFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
+  {
+    return ~( QueueFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueueFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding)
+    };
+  };
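+
+  // Illustrative usage sketch (editorial comment, not part of the generated
+  // header): the Flags<> pattern above lets individual QueueFlagBits be
+  // combined and tested against a queue family's capabilities. 'family' is an
+  // assumed QueueFamilyProperties value obtained from a physical device query.
+  //
+  //   vk::QueueFlags wanted = vk::QueueFlagBits::eGraphics
+  //                         | vk::QueueFlagBits::eCompute;
+  //   bool suitable = ( family.queueFlags & wanted ) == wanted;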
+
+  struct QueueFamilyProperties
+  {
+    operator const VkQueueFamilyProperties&() const
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
+    }
+
+    bool operator==( QueueFamilyProperties const& rhs ) const
+    {
+      return ( queueFlags == rhs.queueFlags )
+          && ( queueCount == rhs.queueCount )
+          && ( timestampValidBits == rhs.timestampValidBits )
+          && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+    }
+
+    bool operator!=( QueueFamilyProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    QueueFlags queueFlags;
+    uint32_t queueCount;
+    uint32_t timestampValidBits;
+    Extent3D minImageTransferGranularity;
+  };
+  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+
+  struct QueueFamilyProperties2KHR
+  {
+    operator const VkQueueFamilyProperties2KHR&() const
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2KHR*>(this);
+    }
+
+    bool operator==( QueueFamilyProperties2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyProperties == rhs.queueFamilyProperties );
+    }
+
+    bool operator!=( QueueFamilyProperties2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    QueueFamilyProperties queueFamilyProperties;
+  };
+  static_assert( sizeof( QueueFamilyProperties2KHR ) == sizeof( VkQueueFamilyProperties2KHR ), "struct and wrapper have different size!" );
+
+  enum class MemoryPropertyFlagBits
+  {
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
+  };
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
+
+  VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
+  {
+    return MemoryPropertyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
+  {
+    return ~( MemoryPropertyFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated)
+    };
+  };
+
+  struct MemoryType
+  {
+    operator const VkMemoryType&() const
+    {
+      return *reinterpret_cast<const VkMemoryType*>(this);
+    }
+
+    bool operator==( MemoryType const& rhs ) const
+    {
+      return ( propertyFlags == rhs.propertyFlags )
+          && ( heapIndex == rhs.heapIndex );
+    }
+
+    bool operator!=( MemoryType const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    MemoryPropertyFlags propertyFlags;
+    uint32_t heapIndex;
+  };
+  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+
+  enum class MemoryHeapFlagBits
+  {
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstanceKHX = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHX
+  };
+
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
+
+  VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
+  {
+    return MemoryHeapFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
+  {
+    return ~( MemoryHeapFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<MemoryHeapFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstanceKHX)
+    };
+  };
+
+  struct MemoryHeap
+  {
+    operator const VkMemoryHeap&() const
+    {
+      return *reinterpret_cast<const VkMemoryHeap*>(this);
+    }
+
+    bool operator==( MemoryHeap const& rhs ) const
+    {
+      return ( size == rhs.size )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( MemoryHeap const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DeviceSize size;
+    MemoryHeapFlags flags;
+  };
+  static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceMemoryProperties
+  {
+    operator const VkPhysicalDeviceMemoryProperties&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this);
+    }
+
+    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const
+    {
+      return ( memoryTypeCount == rhs.memoryTypeCount )
+          && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 )
+          && ( memoryHeapCount == rhs.memoryHeapCount )
+          && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t memoryTypeCount;
+    MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t memoryHeapCount;
+    MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
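+
+  // Illustrative usage sketch (editorial comment): a common pattern is to scan
+  // memoryTypes for an index that is both permitted by a resource's
+  // memoryTypeBits and has the desired properties. 'memProps' and
+  // 'requirements' are assumed to come from the physical device and resource
+  // requirement queries.
+  //
+  //   uint32_t typeIndex = VK_MAX_MEMORY_TYPES;
+  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; i++ )
+  //   {
+  //     bool allowed = ( requirements.memoryTypeBits & ( 1u << i ) ) != 0;
+  //     if ( allowed && ( memProps.memoryTypes[i].propertyFlags
+  //                       & vk::MemoryPropertyFlagBits::eHostVisible ) )
+  //     {
+  //       typeIndex = i;
+  //       break;
+  //     }
+  //   }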
+
+  struct PhysicalDeviceMemoryProperties2KHR
+  {
+    operator const VkPhysicalDeviceMemoryProperties2KHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2KHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceMemoryProperties2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryProperties == rhs.memoryProperties );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    PhysicalDeviceMemoryProperties memoryProperties;
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties2KHR ) == sizeof( VkPhysicalDeviceMemoryProperties2KHR ), "struct and wrapper have different size!" );
+
+  enum class AccessFlagBits
+  {
+    eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+    eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
+    eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT
+  };
+
+  using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
+
+  VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
+  {
+    return AccessFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
+  {
+    return ~( AccessFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<AccessFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT)
+    };
+  };
+
+  struct MemoryBarrier
+  {
+    MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
+      : sType( StructureType::eMemoryBarrier )
+      , pNext( nullptr )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+    {
+    }
+
+    MemoryBarrier( VkMemoryBarrier const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryBarrier ) );
+    }
+
+    MemoryBarrier& operator=( VkMemoryBarrier const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryBarrier ) );
+      return *this;
+    }
+    MemoryBarrier& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    operator const VkMemoryBarrier&() const
+    {
+      return *reinterpret_cast<const VkMemoryBarrier*>(this);
+    }
+
+    bool operator==( MemoryBarrier const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
+    }
+
+    bool operator!=( MemoryBarrier const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    AccessFlags srcAccessMask;
+    AccessFlags dstAccessMask;
+  };
+  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+
+  struct BufferMemoryBarrier
+  {
+    BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
+      : sType( StructureType::eBufferMemoryBarrier )
+      , pNext( nullptr )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+      , size( size_ )
+    {
+    }
+
+    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) );
+    }
+
+    BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) );
+      return *this;
+    }
+    BufferMemoryBarrier& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setOffset( DeviceSize offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferMemoryBarrier& setSize( DeviceSize size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator const VkBufferMemoryBarrier&() const
+    {
+      return *reinterpret_cast<const VkBufferMemoryBarrier*>(this);
+    }
+
+    bool operator==( BufferMemoryBarrier const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferMemoryBarrier const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    AccessFlags srcAccessMask;
+    AccessFlags dstAccessMask;
+    uint32_t srcQueueFamilyIndex;
+    uint32_t dstQueueFamilyIndex;
+    Buffer buffer;
+    DeviceSize offset;
+    DeviceSize size;
+  };
+  static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
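+
+  // Illustrative usage sketch (editorial comment): the fluent setters allow a
+  // barrier to be assembled in place, e.g. for a transfer-write to shader-read
+  // hand-off on an assumed buffer 'buf'.
+  //
+  //   auto barrier = vk::BufferMemoryBarrier()
+  //     .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
+  //     .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
+  //     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //     .setBuffer( buf )
+  //     .setOffset( 0 )
+  //     .setSize( VK_WHOLE_SIZE );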
+
+  enum class BufferUsageFlagBits
+  {
+    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT
+  };
+
+  using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
+
+  VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
+  {
+    return BufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
+  {
+    return ~( BufferUsageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<BufferUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
+    };
+  };
+
+  enum class BufferCreateFlagBits
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
+  };
+
+  using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
+
+  VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
+  {
+    return BufferCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
+  {
+    return ~( BufferCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<BufferCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased)
+    };
+  };
+
+  struct BufferCreateInfo
+  {
+    BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
+      : sType( StructureType::eBufferCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , size( size_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {
+    }
+
+    BufferCreateInfo( VkBufferCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferCreateInfo ) );
+    }
+
+    BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferCreateInfo ) );
+      return *this;
+    }
+    BufferCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferCreateInfo& setFlags( BufferCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferCreateInfo& setSize( DeviceSize size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    BufferCreateInfo& setUsage( BufferUsageFlags usage_ )
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    BufferCreateInfo& setSharingMode( SharingMode sharingMode_ )
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    operator const VkBufferCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkBufferCreateInfo*>(this);
+    }
+
+    bool operator==( BufferCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( size == rhs.size )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+    }
+
+    bool operator!=( BufferCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    BufferCreateFlags flags;
+    DeviceSize size;
+    BufferUsageFlags usage;
+    SharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+  };
+  static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
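+
+  // Illustrative usage sketch (editorial comment): BufferCreateInfo is
+  // typically filled through the setters and then handed to the device;
+  // 'device' is an assumed vk::Device.
+  //
+  //   auto info = vk::BufferCreateInfo()
+  //     .setSize( 65536 )
+  //     .setUsage( vk::BufferUsageFlagBits::eVertexBuffer
+  //              | vk::BufferUsageFlagBits::eTransferDst )
+  //     .setSharingMode( vk::SharingMode::eExclusive );
+  //   vk::Buffer buffer = device.createBuffer( info );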
+
+  enum class ShaderStageFlagBits
+  {
+    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+    eAll = VK_SHADER_STAGE_ALL
+  };
+
+  using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
+
+  VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
+  {
+    return ShaderStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
+  {
+    return ~( ShaderStageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ShaderStageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
+    };
+  };
+
+  struct DescriptorSetLayoutBinding
+  {
+    DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
+      : binding( binding_ )
+      , descriptorType( descriptorType_ )
+      , descriptorCount( descriptorCount_ )
+      , stageFlags( stageFlags_ )
+      , pImmutableSamplers( pImmutableSamplers_ )
+    {
+    }
+
+    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) );
+    }
+
+    DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) );
+      return *this;
+    }
+    DescriptorSetLayoutBinding& setBinding( uint32_t binding_ )
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ )
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ )
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ )
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ )
+    {
+      pImmutableSamplers = pImmutableSamplers_;
+      return *this;
+    }
+
+    operator const VkDescriptorSetLayoutBinding&() const
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>(this);
+    }
+
+    bool operator==( DescriptorSetLayoutBinding const& rhs ) const
+    {
+      return ( binding == rhs.binding )
+          && ( descriptorType == rhs.descriptorType )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( stageFlags == rhs.stageFlags )
+          && ( pImmutableSamplers == rhs.pImmutableSamplers );
+    }
+
+    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t binding;
+    DescriptorType descriptorType;
+    uint32_t descriptorCount;
+    ShaderStageFlags stageFlags;
+    const Sampler* pImmutableSamplers;
+  };
+  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
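+
+  // Illustrative usage sketch (editorial comment): a uniform-buffer binding
+  // visible to the vertex and fragment stages could be described as follows.
+  //
+  //   vk::DescriptorSetLayoutBinding binding = vk::DescriptorSetLayoutBinding()
+  //     .setBinding( 0 )
+  //     .setDescriptorType( vk::DescriptorType::eUniformBuffer )
+  //     .setDescriptorCount( 1 )
+  //     .setStageFlags( vk::ShaderStageFlagBits::eVertex
+  //                   | vk::ShaderStageFlagBits::eFragment );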
+
+  struct PipelineShaderStageCreateInfo
+  {
+    PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = ShaderModule(), const char* pName_ = nullptr, const SpecializationInfo* pSpecializationInfo_ = nullptr )
+      : sType( StructureType::ePipelineShaderStageCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , stage( stage_ )
+      , module( module_ )
+      , pName( pName_ )
+      , pSpecializationInfo( pSpecializationInfo_ )
+    {
+    }
+
+    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) );
+    }
+
+    PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) );
+      return *this;
+    }
+    PipelineShaderStageCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ )
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo& setModule( ShaderModule module_ )
+    {
+      module = module_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo& setPName( const char* pName_ )
+    {
+      pName = pName_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ )
+    {
+      pSpecializationInfo = pSpecializationInfo_;
+      return *this;
+    }
+
+    operator const VkPipelineShaderStageCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( module == rhs.module )
+          && ( pName == rhs.pName )
+          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+    }
+
+    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineShaderStageCreateFlags flags;
+    ShaderStageFlagBits stage;
+    ShaderModule module;
+    const char* pName;
+    const SpecializationInfo* pSpecializationInfo;
+  };
+  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+
+  struct PushConstantRange
+  {
+    PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), uint32_t offset_ = 0, uint32_t size_ = 0 )
+      : stageFlags( stageFlags_ )
+      , offset( offset_ )
+      , size( size_ )
+    {
+    }
+
+    PushConstantRange( VkPushConstantRange const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PushConstantRange ) );
+    }
+
+    PushConstantRange& operator=( VkPushConstantRange const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PushConstantRange ) );
+      return *this;
+    }
+    PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ )
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    PushConstantRange& setOffset( uint32_t offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    PushConstantRange& setSize( uint32_t size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator const VkPushConstantRange&() const
+    {
+      return *reinterpret_cast<const VkPushConstantRange*>(this);
+    }
+
+    bool operator==( PushConstantRange const& rhs ) const
+    {
+      return ( stageFlags == rhs.stageFlags )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( PushConstantRange const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ShaderStageFlags stageFlags;
+    uint32_t offset;
+    uint32_t size;
+  };
+  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+
+  struct PipelineLayoutCreateInfo
+  {
+    PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), uint32_t setLayoutCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr, uint32_t pushConstantRangeCount_ = 0, const PushConstantRange* pPushConstantRanges_ = nullptr )
+      : sType( StructureType::ePipelineLayoutCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , setLayoutCount( setLayoutCount_ )
+      , pSetLayouts( pSetLayouts_ )
+      , pushConstantRangeCount( pushConstantRangeCount_ )
+      , pPushConstantRanges( pPushConstantRanges_ )
+    {
+    }
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) );
+    }
+
+    PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) );
+      return *this;
+    }
+    PipelineLayoutCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ )
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ )
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ )
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+    operator const VkPipelineLayoutCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineLayoutCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( setLayoutCount == rhs.setLayoutCount )
+          && ( pSetLayouts == rhs.pSetLayouts )
+          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+          && ( pPushConstantRanges == rhs.pPushConstantRanges );
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineLayoutCreateFlags flags;
+    uint32_t setLayoutCount;
+    const DescriptorSetLayout* pSetLayouts;
+    uint32_t pushConstantRangeCount;
+    const PushConstantRange* pPushConstantRanges;
+  };
+  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
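+
+  // Illustrative usage sketch (editorial comment): a pipeline layout with one
+  // descriptor set layout and one push-constant range; 'setLayout' is an
+  // assumed vk::DescriptorSetLayout.
+  //
+  //   vk::PushConstantRange pcRange = vk::PushConstantRange()
+  //     .setStageFlags( vk::ShaderStageFlagBits::eVertex )
+  //     .setOffset( 0 )
+  //     .setSize( 16 );
+  //   auto layoutInfo = vk::PipelineLayoutCreateInfo()
+  //     .setSetLayoutCount( 1 )
+  //     .setPSetLayouts( &setLayout )
+  //     .setPushConstantRangeCount( 1 )
+  //     .setPPushConstantRanges( &pcRange );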
+
+  enum class ImageUsageFlagBits
+  {
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+  };
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
+
+  VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
+  {
+    return ImageUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
+  {
+    return ~( ImageUsageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ImageUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
+    };
+  };
+
+  struct SharedPresentSurfaceCapabilitiesKHR
+  {
+    operator const VkSharedPresentSurfaceCapabilitiesKHR&() const
+    {
+      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>(this);
+    }
+
+    bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
+    }
+
+    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    ImageUsageFlags sharedPresentSupportedUsageFlags;
+  };
+  static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+  enum class ImageCreateFlagBits
+  {
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eBindSfrKHX = VK_IMAGE_CREATE_BIND_SFR_BIT_KHX,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR
+  };
+
+  using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
+
+  VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
+  {
+    return ImageCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
+  {
+    return ~( ImageCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ImageCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eBindSfrKHX) | VkFlags(ImageCreateFlagBits::e2DArrayCompatibleKHR)
+    };
+  };
+
+  struct PhysicalDeviceImageFormatInfo2KHR
+  {
+    PhysicalDeviceImageFormatInfo2KHR( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), ImageCreateFlags flags_ = ImageCreateFlags() )
+      : sType( StructureType::ePhysicalDeviceImageFormatInfo2KHR )
+      , pNext( nullptr )
+      , format( format_ )
+      , type( type_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , flags( flags_ )
+    {
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR( VkPhysicalDeviceImageFormatInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2KHR ) );
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR& operator=( VkPhysicalDeviceImageFormatInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2KHR ) );
+      return *this;
+    }
+    PhysicalDeviceImageFormatInfo2KHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR& setType( ImageType type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR& setTiling( ImageTiling tiling_ )
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR& setUsage( ImageUsageFlags usage_ )
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2KHR& setFlags( ImageCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceImageFormatInfo2KHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceImageFormatInfo2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( PhysicalDeviceImageFormatInfo2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Format format;
+    ImageType type;
+    ImageTiling tiling;
+    ImageUsageFlags usage;
+    ImageCreateFlags flags;
+  };
+  static_assert( sizeof( PhysicalDeviceImageFormatInfo2KHR ) == sizeof( VkPhysicalDeviceImageFormatInfo2KHR ), "struct and wrapper have different size!" );
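+
+  // Illustrative usage sketch (editorial comment): the structure above is the
+  // input to the VK_KHR_get_physical_device_properties2 image-format query and
+  // can be built with the setters.
+  //
+  //   auto formatInfo = vk::PhysicalDeviceImageFormatInfo2KHR()
+  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //     .setType( vk::ImageType::e2D )
+  //     .setTiling( vk::ImageTiling::eOptimal )
+  //     .setUsage( vk::ImageUsageFlagBits::eSampled );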
+
+  enum class PipelineCreateFlagBits
+  {
+    eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+    eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+    eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+    eViewIndexFromDeviceIndexKHX = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHX,
+    eDispatchBaseKHX = VK_PIPELINE_CREATE_DISPATCH_BASE_KHX
+  };
+
+  using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
+
+  VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
+  {
+    return PipelineCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
+  {
+    return ~( PipelineCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<PipelineCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndexKHX) | VkFlags(PipelineCreateFlagBits::eDispatchBaseKHX)
+    };
+  };
+
+  struct ComputePipelineCreateInfo
+  {
+    ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
+      : sType( StructureType::eComputePipelineCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , stage( stage_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {
+    }
+
+    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) );
+    }
+
+    ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) );
+      return *this;
+    }
+    ComputePipelineCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ )
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ )
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator const VkComputePipelineCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkComputePipelineCreateInfo*>(this);
+    }
+
+    bool operator==( ComputePipelineCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( ComputePipelineCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineCreateFlags flags;
+    PipelineShaderStageCreateInfo stage;
+    PipelineLayout layout;
+    Pipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+  };
+  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
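+
+  // Illustrative usage sketch (editorial comment): a compute pipeline needs a
+  // single compute-stage description and a pipeline layout; 'csModule' and
+  // 'layout' are assumed handles.
+  //
+  //   auto stage = vk::PipelineShaderStageCreateInfo()
+  //     .setStage( vk::ShaderStageFlagBits::eCompute )
+  //     .setModule( csModule )
+  //     .setPName( "main" );
+  //   auto pipelineInfo = vk::ComputePipelineCreateInfo()
+  //     .setStage( stage )
+  //     .setLayout( layout );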
+
+  enum class ColorComponentFlagBits
+  {
+    eR = VK_COLOR_COMPONENT_R_BIT,
+    eG = VK_COLOR_COMPONENT_G_BIT,
+    eB = VK_COLOR_COMPONENT_B_BIT,
+    eA = VK_COLOR_COMPONENT_A_BIT
+  };
+
+  using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
+
+  VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
+  {
+    return ColorComponentFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
+  {
+    return ~( ColorComponentFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ColorComponentFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+    };
+  };
+
+  struct PipelineColorBlendAttachmentState
+  {
+    PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
+      : blendEnable( blendEnable_ )
+      , srcColorBlendFactor( srcColorBlendFactor_ )
+      , dstColorBlendFactor( dstColorBlendFactor_ )
+      , colorBlendOp( colorBlendOp_ )
+      , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
+      , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
+      , alphaBlendOp( alphaBlendOp_ )
+      , colorWriteMask( colorWriteMask_ )
+    {
+    }
+
+    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+    }
+
+    PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+      return *this;
+    }
+    PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ )
+    {
+      blendEnable = blendEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ )
+    {
+      srcColorBlendFactor = srcColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ )
+    {
+      dstColorBlendFactor = dstColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ )
+    {
+      colorBlendOp = colorBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ )
+    {
+      srcAlphaBlendFactor = srcAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ )
+    {
+      dstAlphaBlendFactor = dstAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ )
+    {
+      alphaBlendOp = alphaBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ )
+    {
+      colorWriteMask = colorWriteMask_;
+      return *this;
+    }
+
+    operator const VkPipelineColorBlendAttachmentState&() const
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>(this);
+    }
+
+    bool operator==( PipelineColorBlendAttachmentState const& rhs ) const
+    {
+      return ( blendEnable == rhs.blendEnable )
+          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+          && ( colorBlendOp == rhs.colorBlendOp )
+          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+          && ( alphaBlendOp == rhs.alphaBlendOp )
+          && ( colorWriteMask == rhs.colorWriteMask );
+    }
+
+    bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Bool32 blendEnable;
+    BlendFactor srcColorBlendFactor;
+    BlendFactor dstColorBlendFactor;
+    BlendOp colorBlendOp;
+    BlendFactor srcAlphaBlendFactor;
+    BlendFactor dstAlphaBlendFactor;
+    BlendOp alphaBlendOp;
+    ColorComponentFlags colorWriteMask;
+  };
+  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+
+  struct PipelineColorBlendStateCreateInfo
+  {
+    PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), Bool32 logicOpEnable_ = 0, LogicOp logicOp_ = LogicOp::eClear, uint32_t attachmentCount_ = 0, const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, std::array<float,4> const& blendConstants_ = { { 0, 0, 0, 0 } } )
+      : sType( StructureType::ePipelineColorBlendStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , logicOpEnable( logicOpEnable_ )
+      , logicOp( logicOp_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+    {
+      memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+    }
+
+    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
+    }
+
+    PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) );
+      return *this;
+    }
+    PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ )
+    {
+      logicOpEnable = logicOpEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ )
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ )
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo& setBlendConstants( std::array<float,4> blendConstants_ )
+    {
+      memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator const VkPipelineColorBlendStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( logicOpEnable == rhs.logicOpEnable )
+          && ( logicOp == rhs.logicOp )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineColorBlendStateCreateFlags flags;
+    Bool32 logicOpEnable;
+    LogicOp logicOp;
+    uint32_t attachmentCount;
+    const PipelineColorBlendAttachmentState* pAttachments;
+    float blendConstants[4];
+  };
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
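+
+  // Illustrative usage sketch (editorial comment): standard alpha blending for
+  // a single color attachment.
+  //
+  //   auto attachment = vk::PipelineColorBlendAttachmentState()
+  //     .setBlendEnable( VK_TRUE )
+  //     .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
+  //     .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
+  //     .setColorBlendOp( vk::BlendOp::eAdd )
+  //     .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
+  //                       | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
+  //   auto blendState = vk::PipelineColorBlendStateCreateInfo()
+  //     .setAttachmentCount( 1 )
+  //     .setPAttachments( &attachment );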
+
+  enum class FenceCreateFlagBits
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
+
+  VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
+  {
+    return FenceCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+    };
+  };
+
+  struct FenceCreateInfo
+  {
+    FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
+      : sType( StructureType::eFenceCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+    {
+    }
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
+    }
+
+    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceCreateInfo ) );
+      return *this;
+    }
+    FenceCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkFenceCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo*>(this);
+    }
+
+    bool operator==( FenceCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( FenceCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    FenceCreateFlags flags;
+  };
+  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
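+
+  // Illustrative usage sketch (editorial comment): creating a fence in the
+  // signaled state, as is common for per-frame synchronization; 'device' is an
+  // assumed vk::Device.
+  //
+  //   vk::Fence fence = device.createFence(
+  //     vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );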
+
+  enum class FormatFeatureFlagBits
+  {
+    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
+    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT
+  };
+
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
+
+  VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
+  {
+    return FormatFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
+  {
+    return ~( FormatFeatureFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<FormatFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eTransferSrcKHR) | VkFlags(FormatFeatureFlagBits::eTransferDstKHR) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT)
+    };
+  };
+
+  struct FormatProperties
+  {
+    operator const VkFormatProperties&() const
+    {
+      return *reinterpret_cast<const VkFormatProperties*>(this);
+    }
+
+    bool operator==( FormatProperties const& rhs ) const
+    {
+      return ( linearTilingFeatures == rhs.linearTilingFeatures )
+          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+          && ( bufferFeatures == rhs.bufferFeatures );
+    }
+
+    bool operator!=( FormatProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    FormatFeatureFlags linearTilingFeatures;
+    FormatFeatureFlags optimalTilingFeatures;
+    FormatFeatureFlags bufferFeatures;
+  };
+  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
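+
+  // Usage sketch (illustrative comment only; 'physicalDevice' is an assumed
+  // vk::PhysicalDevice handle): query the feature flags of a format and test a
+  // single bit through the Flags operators defined above.
+  //   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
+  //   if ( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eColorAttachment )
+  //   {
+  //     // the format can be used as a color attachment with optimal tiling
+  //   }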
+
+  struct FormatProperties2KHR
+  {
+    operator const VkFormatProperties2KHR&() const
+    {
+      return *reinterpret_cast<const VkFormatProperties2KHR*>(this);
+    }
+
+    bool operator==( FormatProperties2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatProperties == rhs.formatProperties );
+    }
+
+    bool operator!=( FormatProperties2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    FormatProperties formatProperties;
+  };
+  static_assert( sizeof( FormatProperties2KHR ) == sizeof( VkFormatProperties2KHR ), "struct and wrapper have different size!" );
+
+  enum class QueryControlFlagBits
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
+
+  VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
+  {
+    return QueryControlFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
+  {
+    return ~( QueryControlFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueryControlFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryControlFlagBits::ePrecise)
+    };
+  };
+
+  enum class QueryResultFlagBits
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT
+  };
+
+  using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
+
+  VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
+  {
+    return QueryResultFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
+  {
+    return ~( QueryResultFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueryResultFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
+    };
+  };
+
+  enum class CommandBufferUsageFlagBits
+  {
+    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+  };
+
+  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
+
+  VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
+  {
+    return CommandBufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
+  {
+    return ~( CommandBufferUsageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+    };
+  };
+
+  enum class QueryPipelineStatisticFlagBits
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+  };
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
+
+  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
+  {
+    return QueryPipelineStatisticFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
+  {
+    return ~( QueryPipelineStatisticFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
+    };
+  };
+
+  struct CommandBufferInheritanceInfo
+  {
+    CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
+      : sType( StructureType::eCommandBufferInheritanceInfo )
+      , pNext( nullptr )
+      , renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , framebuffer( framebuffer_ )
+      , occlusionQueryEnable( occlusionQueryEnable_ )
+      , queryFlags( queryFlags_ )
+      , pipelineStatistics( pipelineStatistics_ )
+    {
+    }
+
+    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) );
+    }
+
+    CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) );
+      return *this;
+    }
+    CommandBufferInheritanceInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ )
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ )
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ )
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ )
+    {
+      occlusionQueryEnable = occlusionQueryEnable_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ )
+    {
+      queryFlags = queryFlags_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+    operator const VkCommandBufferInheritanceInfo&() const
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>(this);
+    }
+
+    bool operator==( CommandBufferInheritanceInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
+          && ( queryFlags == rhs.queryFlags )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RenderPass renderPass;
+    uint32_t subpass;
+    Framebuffer framebuffer;
+    Bool32 occlusionQueryEnable;
+    QueryControlFlags queryFlags;
+    QueryPipelineStatisticFlags pipelineStatistics;
+  };
+  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+
+  struct CommandBufferBeginInfo
+  {
+    CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr )
+      : sType( StructureType::eCommandBufferBeginInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pInheritanceInfo( pInheritanceInfo_ )
+    {
+    }
+
+    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) );
+    }
+
+    CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) );
+      return *this;
+    }
+    CommandBufferBeginInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ )
+    {
+      pInheritanceInfo = pInheritanceInfo_;
+      return *this;
+    }
+
+    operator const VkCommandBufferBeginInfo&() const
+    {
+      return *reinterpret_cast<const VkCommandBufferBeginInfo*>(this);
+    }
+
+    bool operator==( CommandBufferBeginInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pInheritanceInfo == rhs.pInheritanceInfo );
+    }
+
+    bool operator!=( CommandBufferBeginInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    CommandBufferUsageFlags flags;
+    const CommandBufferInheritanceInfo* pInheritanceInfo;
+  };
+  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
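+
+  // Usage sketch (illustrative comment only; 'commandBuffer' is an assumed
+  // vk::CommandBuffer handle): begin a primary command buffer for one-time use.
+  //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
+  //   commandBuffer.begin( beginInfo );
+  //   // ... record commands ...
+  //   commandBuffer.end();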
+
+  struct QueryPoolCreateInfo
+  {
+    QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), QueryType queryType_ = QueryType::eOcclusion, uint32_t queryCount_ = 0, QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
+      : sType( StructureType::eQueryPoolCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , queryType( queryType_ )
+      , queryCount( queryCount_ )
+      , pipelineStatistics( pipelineStatistics_ )
+    {
+    }
+
+    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) );
+    }
+
+    QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) );
+      return *this;
+    }
+    QueryPoolCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo& setQueryType( QueryType queryType_ )
+    {
+      queryType = queryType_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ )
+    {
+      queryCount = queryCount_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+    operator const VkQueryPoolCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfo*>(this);
+    }
+
+    bool operator==( QueryPoolCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queryType == rhs.queryType )
+          && ( queryCount == rhs.queryCount )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( QueryPoolCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    QueryPoolCreateFlags flags;
+    QueryType queryType;
+    uint32_t queryCount;
+    QueryPipelineStatisticFlags pipelineStatistics;
+  };
+  static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
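+
+  // Usage sketch (illustrative comment only; 'device' is an assumed vk::Device
+  // handle): an occlusion query pool with 16 queries.
+  //   vk::QueryPoolCreateInfo poolInfo( vk::QueryPoolCreateFlags(), vk::QueryType::eOcclusion, 16 );
+  //   vk::QueryPool queryPool = device.createQueryPool( poolInfo );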
+
+  enum class ImageAspectFlagBits
+  {
+    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT
+  };
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
+
+  VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
+  {
+    return ImageAspectFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
+  {
+    return ~( ImageAspectFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata)
+    };
+  };
+
+  struct ImageSubresource
+  {
+    ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
+      : aspectMask( aspectMask_ )
+      , mipLevel( mipLevel_ )
+      , arrayLayer( arrayLayer_ )
+    {
+    }
+
+    ImageSubresource( VkImageSubresource const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSubresource ) );
+    }
+
+    ImageSubresource& operator=( VkImageSubresource const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSubresource ) );
+      return *this;
+    }
+    ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ )
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresource& setMipLevel( uint32_t mipLevel_ )
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresource& setArrayLayer( uint32_t arrayLayer_ )
+    {
+      arrayLayer = arrayLayer_;
+      return *this;
+    }
+
+    operator const VkImageSubresource&() const
+    {
+      return *reinterpret_cast<const VkImageSubresource*>(this);
+    }
+
+    bool operator==( ImageSubresource const& rhs ) const
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( arrayLayer == rhs.arrayLayer );
+    }
+
+    bool operator!=( ImageSubresource const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageAspectFlags aspectMask;
+    uint32_t mipLevel;
+    uint32_t arrayLayer;
+  };
+  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+
+  struct ImageSubresourceLayers
+  {
+    ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
+      : aspectMask( aspectMask_ )
+      , mipLevel( mipLevel_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {
+    }
+
+    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) );
+    }
+
+    ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) );
+      return *this;
+    }
+    ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ )
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ )
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ )
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ )
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator const VkImageSubresourceLayers&() const
+    {
+      return *reinterpret_cast<const VkImageSubresourceLayers*>(this);
+    }
+
+    bool operator==( ImageSubresourceLayers const& rhs ) const
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceLayers const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageAspectFlags aspectMask;
+    uint32_t mipLevel;
+    uint32_t baseArrayLayer;
+    uint32_t layerCount;
+  };
+  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+
+  struct ImageSubresourceRange
+  {
+    ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t baseMipLevel_ = 0, uint32_t levelCount_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
+      : aspectMask( aspectMask_ )
+      , baseMipLevel( baseMipLevel_ )
+      , levelCount( levelCount_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {
+    }
+
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSubresourceRange ) );
+    }
+
+    ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageSubresourceRange ) );
+      return *this;
+    }
+    ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ )
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ )
+    {
+      baseMipLevel = baseMipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceRange& setLevelCount( uint32_t levelCount_ )
+    {
+      levelCount = levelCount_;
+      return *this;
+    }
+
+    ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ )
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceRange& setLayerCount( uint32_t layerCount_ )
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator const VkImageSubresourceRange&() const
+    {
+      return *reinterpret_cast<const VkImageSubresourceRange*>(this);
+    }
+
+    bool operator==( ImageSubresourceRange const& rhs ) const
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( baseMipLevel == rhs.baseMipLevel )
+          && ( levelCount == rhs.levelCount )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceRange const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageAspectFlags aspectMask;
+    uint32_t baseMipLevel;
+    uint32_t levelCount;
+    uint32_t baseArrayLayer;
+    uint32_t layerCount;
+  };
+  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
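+
+  // Usage sketch (illustrative comment only): the color aspect of the first mip
+  // level and first array layer of an image.
+  //   vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );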
+
+  struct ImageMemoryBarrier
+  {
+    ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Image image_ = Image(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
+      : sType( StructureType::eImageMemoryBarrier )
+      , pNext( nullptr )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , oldLayout( oldLayout_ )
+      , newLayout( newLayout_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , image( image_ )
+      , subresourceRange( subresourceRange_ )
+    {
+    }
+
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) );
+    }
+
+    ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) );
+      return *this;
+    }
+    ImageMemoryBarrier& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ )
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ )
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator const VkImageMemoryBarrier&() const
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier*>(this);
+    }
+
+    bool operator==( ImageMemoryBarrier const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageMemoryBarrier const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    AccessFlags srcAccessMask;
+    AccessFlags dstAccessMask;
+    ImageLayout oldLayout;
+    ImageLayout newLayout;
+    uint32_t srcQueueFamilyIndex;
+    uint32_t dstQueueFamilyIndex;
+    Image image;
+    ImageSubresourceRange subresourceRange;
+  };
+  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
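+
+  // Usage sketch (illustrative comment only; 'image' is an assumed vk::Image
+  // handle): a barrier transitioning an image from the undefined layout to a
+  // transfer destination layout, built with the chained setters.
+  //   vk::ImageMemoryBarrier barrier;
+  //   barrier.setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
+  //          .setOldLayout( vk::ImageLayout::eUndefined )
+  //          .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
+  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //          .setImage( image )
+  //          .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );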
+
+  struct ImageViewCreateInfo
+  {
+    ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), Image image_ = Image(), ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = ComponentMapping(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
+      : sType( StructureType::eImageViewCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , image( image_ )
+      , viewType( viewType_ )
+      , format( format_ )
+      , components( components_ )
+      , subresourceRange( subresourceRange_ )
+    {
+    }
+
+    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) );
+    }
+
+    ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) );
+      return *this;
+    }
+    ImageViewCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageViewCreateInfo& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageViewCreateInfo& setViewType( ImageViewType viewType_ )
+    {
+      viewType = viewType_;
+      return *this;
+    }
+
+    ImageViewCreateInfo& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageViewCreateInfo& setComponents( ComponentMapping components_ )
+    {
+      components = components_;
+      return *this;
+    }
+
+    ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator const VkImageViewCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkImageViewCreateInfo*>(this);
+    }
+
+    bool operator==( ImageViewCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( image == rhs.image )
+          && ( viewType == rhs.viewType )
+          && ( format == rhs.format )
+          && ( components == rhs.components )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageViewCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ImageViewCreateFlags flags;
+    Image image;
+    ImageViewType viewType;
+    Format format;
+    ComponentMapping components;
+    ImageSubresourceRange subresourceRange;
+  };
+  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
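+
+  // Usage sketch (illustrative comment only; 'device' and 'image' are assumed
+  // handles): a plain 2D color view over the first mip level and array layer.
+  //   vk::ImageViewCreateInfo viewInfo( vk::ImageViewCreateFlags(), image,
+  //     vk::ImageViewType::e2D, vk::Format::eB8G8R8A8Unorm, vk::ComponentMapping(),
+  //     vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
+  //   vk::ImageView view = device.createImageView( viewInfo );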
+
+  struct ImageCopy
+  {
+    ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
+      : srcSubresource( srcSubresource_ )
+      , srcOffset( srcOffset_ )
+      , dstSubresource( dstSubresource_ )
+      , dstOffset( dstOffset_ )
+      , extent( extent_ )
+    {
+    }
+
+    ImageCopy( VkImageCopy const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageCopy ) );
+    }
+
+    ImageCopy& operator=( VkImageCopy const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageCopy ) );
+      return *this;
+    }
+    ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageCopy& setSrcOffset( Offset3D srcOffset_ )
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageCopy& setDstOffset( Offset3D dstOffset_ )
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageCopy& setExtent( Extent3D extent_ )
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator const VkImageCopy&() const
+    {
+      return *reinterpret_cast<const VkImageCopy*>(this);
+    }
+
+    bool operator==( ImageCopy const& rhs ) const
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageCopy const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageSubresourceLayers srcSubresource;
+    Offset3D srcOffset;
+    ImageSubresourceLayers dstSubresource;
+    Offset3D dstOffset;
+    Extent3D extent;
+  };
+  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+
+  struct ImageBlit
+  {
+    ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& srcOffsets_ = { { Offset3D(), Offset3D() } }, ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
+      : srcSubresource( srcSubresource_ )
+      , dstSubresource( dstSubresource_ )
+    {
+      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
+      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
+    }
+
+    ImageBlit( VkImageBlit const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageBlit ) );
+    }
+
+    ImageBlit& operator=( VkImageBlit const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageBlit ) );
+      return *this;
+    }
+    ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageBlit& setSrcOffsets( std::array<Offset3D,2> srcOffsets_ )
+    {
+      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
+      return *this;
+    }
+
+    ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageBlit& setDstOffsets( std::array<Offset3D,2> dstOffsets_ )
+    {
+      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
+      return *this;
+    }
+
+    operator const VkImageBlit&() const
+    {
+      return *reinterpret_cast<const VkImageBlit*>(this);
+    }
+
+    bool operator==( ImageBlit const& rhs ) const
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
+    }
+
+    bool operator!=( ImageBlit const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageSubresourceLayers srcSubresource;
+    Offset3D srcOffsets[2];
+    ImageSubresourceLayers dstSubresource;
+    Offset3D dstOffsets[2];
+  };
+  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
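+
+  // Usage sketch (illustrative comment only; 'w' and 'h' are assumed image
+  // dimensions): blit the full extent of mip level 0 into mip level 1, e.g. as one
+  // step of mipmap generation.
+  //   vk::ImageBlit blit(
+  //     vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
+  //     { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( w, h, 1 ) } },
+  //     vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ),
+  //     { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( w / 2, h / 2, 1 ) } } );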
+
+  struct BufferImageCopy
+  {
+    BufferImageCopy( DeviceSize bufferOffset_ = 0, uint32_t bufferRowLength_ = 0, uint32_t bufferImageHeight_ = 0, ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), Offset3D imageOffset_ = Offset3D(), Extent3D imageExtent_ = Extent3D() )
+      : bufferOffset( bufferOffset_ )
+      , bufferRowLength( bufferRowLength_ )
+      , bufferImageHeight( bufferImageHeight_ )
+      , imageSubresource( imageSubresource_ )
+      , imageOffset( imageOffset_ )
+      , imageExtent( imageExtent_ )
+    {
+    }
+
+    BufferImageCopy( VkBufferImageCopy const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferImageCopy ) );
+    }
+
+    BufferImageCopy& operator=( VkBufferImageCopy const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BufferImageCopy ) );
+      return *this;
+    }
+    BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ )
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ )
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ )
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ )
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    BufferImageCopy& setImageOffset( Offset3D imageOffset_ )
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    BufferImageCopy& setImageExtent( Extent3D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    operator const VkBufferImageCopy&() const
+    {
+      return *reinterpret_cast<const VkBufferImageCopy*>(this);
+    }
+
+    bool operator==( BufferImageCopy const& rhs ) const
+    {
+      return ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( BufferImageCopy const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DeviceSize bufferOffset;
+    uint32_t bufferRowLength;
+    uint32_t bufferImageHeight;
+    ImageSubresourceLayers imageSubresource;
+    Offset3D imageOffset;
+    Extent3D imageExtent;
+  };
+  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
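+
+  // Usage sketch (illustrative comment only; 'commandBuffer', 'stagingBuffer',
+  // 'image', 'width' and 'height' are assumed): upload a tightly packed buffer
+  // into mip level 0 of a color image.
+  //   vk::BufferImageCopy region( 0, 0, 0,
+  //     vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
+  //     vk::Offset3D( 0, 0, 0 ), vk::Extent3D( width, height, 1 ) );
+  //   commandBuffer.copyBufferToImage( stagingBuffer, image,
+  //     vk::ImageLayout::eTransferDstOptimal, region );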
+
+  struct ImageResolve
+  {
+    ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
+      : srcSubresource( srcSubresource_ )
+      , srcOffset( srcOffset_ )
+      , dstSubresource( dstSubresource_ )
+      , dstOffset( dstOffset_ )
+      , extent( extent_ )
+    {
+    }
+
+    ImageResolve( VkImageResolve const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageResolve ) );
+    }
+
+    ImageResolve& operator=( VkImageResolve const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageResolve ) );
+      return *this;
+    }
+    ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageResolve& setSrcOffset( Offset3D srcOffset_ )
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageResolve& setDstOffset( Offset3D dstOffset_ )
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageResolve& setExtent( Extent3D extent_ )
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator const VkImageResolve&() const
+    {
+      return *reinterpret_cast<const VkImageResolve*>(this);
+    }
+
+    bool operator==( ImageResolve const& rhs ) const
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageResolve const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageSubresourceLayers srcSubresource;
+    Offset3D srcOffset;
+    ImageSubresourceLayers dstSubresource;
+    Offset3D dstOffset;
+    Extent3D extent;
+  };
+  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+
+  struct ClearAttachment
+  {
+    ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t colorAttachment_ = 0, ClearValue clearValue_ = ClearValue() )
+      : aspectMask( aspectMask_ )
+      , colorAttachment( colorAttachment_ )
+      , clearValue( clearValue_ )
+    {
+    }
+
+    ClearAttachment( VkClearAttachment const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearAttachment ) );
+    }
+
+    ClearAttachment& operator=( VkClearAttachment const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ClearAttachment ) );
+      return *this;
+    }
+    ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ )
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ClearAttachment& setColorAttachment( uint32_t colorAttachment_ )
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    ClearAttachment& setClearValue( ClearValue clearValue_ )
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+
+    operator const VkClearAttachment&() const
+    {
+      return *reinterpret_cast<const VkClearAttachment*>(this);
+    }
+
+    ImageAspectFlags aspectMask;
+    uint32_t colorAttachment;
+    ClearValue clearValue;
+  };
+  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+
+  enum class SparseImageFormatFlagBits
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
+
+  VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
+  {
+    return SparseImageFormatFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
+  {
+    return ~( SparseImageFormatFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
+    };
+  };
+
+  struct SparseImageFormatProperties
+  {
+    operator const VkSparseImageFormatProperties&() const
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties*>(this);
+    }
+
+    bool operator==( SparseImageFormatProperties const& rhs ) const
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( imageGranularity == rhs.imageGranularity )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseImageFormatProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageAspectFlags aspectMask;
+    Extent3D imageGranularity;
+    SparseImageFormatFlags flags;
+  };
+  static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+
+  struct SparseImageMemoryRequirements
+  {
+    operator const VkSparseImageMemoryRequirements&() const
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements*>(this);
+    }
+
+    bool operator==( SparseImageMemoryRequirements const& rhs ) const
+    {
+      return ( formatProperties == rhs.formatProperties )
+          && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
+          && ( imageMipTailSize == rhs.imageMipTailSize )
+          && ( imageMipTailOffset == rhs.imageMipTailOffset )
+          && ( imageMipTailStride == rhs.imageMipTailStride );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    SparseImageFormatProperties formatProperties;
+    uint32_t imageMipTailFirstLod;
+    DeviceSize imageMipTailSize;
+    DeviceSize imageMipTailOffset;
+    DeviceSize imageMipTailStride;
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+
+  struct SparseImageFormatProperties2KHR
+  {
+    operator const VkSparseImageFormatProperties2KHR&() const
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties2KHR*>(this);
+    }
+
+    bool operator==( SparseImageFormatProperties2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+    }
+
+    bool operator!=( SparseImageFormatProperties2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    SparseImageFormatProperties properties;
+  };
+  static_assert( sizeof( SparseImageFormatProperties2KHR ) == sizeof( VkSparseImageFormatProperties2KHR ), "struct and wrapper have different size!" );
+
+  struct SparseImageMemoryRequirements2KHR
+  {
+    operator const VkSparseImageMemoryRequirements2KHR&() const
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements2KHR*>(this);
+    }
+
+    bool operator==( SparseImageMemoryRequirements2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    SparseImageMemoryRequirements memoryRequirements;
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements2KHR ) == sizeof( VkSparseImageMemoryRequirements2KHR ), "struct and wrapper have different size!" );
+
+  enum class SparseMemoryBindFlagBits
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
+
+  VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
+  {
+    return SparseMemoryBindFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
+  {
+    return ~( SparseMemoryBindFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
+    };
+  };
+
+  struct SparseMemoryBind
+  {
+    SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
+      : resourceOffset( resourceOffset_ )
+      , size( size_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , flags( flags_ )
+    {
+    }
+
+    SparseMemoryBind( VkSparseMemoryBind const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseMemoryBind ) );
+    }
+
+    SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseMemoryBind ) );
+      return *this;
+    }
+    SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ )
+    {
+      resourceOffset = resourceOffset_;
+      return *this;
+    }
+
+    SparseMemoryBind& setSize( DeviceSize size_ )
+    {
+      size = size_;
+      return *this;
+    }
+
+    SparseMemoryBind& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkSparseMemoryBind&() const
+    {
+      return *reinterpret_cast<const VkSparseMemoryBind*>(this);
+    }
+
+    bool operator==( SparseMemoryBind const& rhs ) const
+    {
+      return ( resourceOffset == rhs.resourceOffset )
+          && ( size == rhs.size )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseMemoryBind const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DeviceSize resourceOffset;
+    DeviceSize size;
+    DeviceMemory memory;
+    DeviceSize memoryOffset;
+    SparseMemoryBindFlags flags;
+  };
+  static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+
+  struct SparseImageMemoryBind
+  {
+    SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), Offset3D offset_ = Offset3D(), Extent3D extent_ = Extent3D(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
+      : subresource( subresource_ )
+      , offset( offset_ )
+      , extent( extent_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , flags( flags_ )
+    {
+    }
+
+    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) );
+    }
+
+    SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) );
+      return *this;
+    }
+    SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ )
+    {
+      subresource = subresource_;
+      return *this;
+    }
+
+    SparseImageMemoryBind& setOffset( Offset3D offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SparseImageMemoryBind& setExtent( Extent3D extent_ )
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    SparseImageMemoryBind& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkSparseImageMemoryBind&() const
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBind*>(this);
+    }
+
+    bool operator==( SparseImageMemoryBind const& rhs ) const
+    {
+      return ( subresource == rhs.subresource )
+          && ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseImageMemoryBind const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageSubresource subresource;
+    Offset3D offset;
+    Extent3D extent;
+    DeviceMemory memory;
+    DeviceSize memoryOffset;
+    SparseMemoryBindFlags flags;
+  };
+  static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
+
+  struct SparseBufferMemoryBindInfo
+  {
+    SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
+      : buffer( buffer_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {
+    }
+
+    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) );
+    }
+
+    SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) );
+      return *this;
+    }
+    SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+    operator const VkSparseBufferMemoryBindInfo&() const
+    {
+      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>(this);
+    }
+
+    bool operator==( SparseBufferMemoryBindInfo const& rhs ) const
+    {
+      return ( buffer == rhs.buffer )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Buffer buffer;
+    uint32_t bindCount;
+    const SparseMemoryBind* pBinds;
+  };
+  static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
+
+  struct SparseImageOpaqueMemoryBindInfo
+  {
+    SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
+      : image( image_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {
+    }
+
+    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
+    }
+
+    SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) );
+      return *this;
+    }
+    SparseImageOpaqueMemoryBindInfo& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+    operator const VkSparseImageOpaqueMemoryBindInfo&() const
+    {
+      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>(this);
+    }
+
+    bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const
+    {
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Image image;
+    uint32_t bindCount;
+    const SparseMemoryBind* pBinds;
+  };
+  static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+
+  struct SparseImageMemoryBindInfo
+  {
+    SparseImageMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseImageMemoryBind* pBinds_ = nullptr )
+      : image( image_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {
+    }
+
+    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) );
+    }
+
+    SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) );
+      return *this;
+    }
+    SparseImageMemoryBindInfo& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ )
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ )
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+    operator const VkSparseImageMemoryBindInfo&() const
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>(this);
+    }
+
+    bool operator==( SparseImageMemoryBindInfo const& rhs ) const
+    {
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Image image;
+    uint32_t bindCount;
+    const SparseImageMemoryBind* pBinds;
+  };
+  static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
+
+  struct BindSparseInfo
+  {
+    BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t bufferBindCount_ = 0, const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, uint32_t imageOpaqueBindCount_ = 0, const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, uint32_t imageBindCount_ = 0, const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
+      : sType( StructureType::eBindSparseInfo )
+      , pNext( nullptr )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , bufferBindCount( bufferBindCount_ )
+      , pBufferBinds( pBufferBinds_ )
+      , imageOpaqueBindCount( imageOpaqueBindCount_ )
+      , pImageOpaqueBinds( pImageOpaqueBinds_ )
+      , imageBindCount( imageBindCount_ )
+      , pImageBinds( pImageBinds_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphores( pSignalSemaphores_ )
+    {
+    }
+
+    BindSparseInfo( VkBindSparseInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindSparseInfo ) );
+    }
+
+    BindSparseInfo& operator=( VkBindSparseInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindSparseInfo ) );
+      return *this;
+    }
+    BindSparseInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ )
+    {
+      bufferBindCount = bufferBindCount_;
+      return *this;
+    }
+
+    BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ )
+    {
+      pBufferBinds = pBufferBinds_;
+      return *this;
+    }
+
+    BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ )
+    {
+      imageOpaqueBindCount = imageOpaqueBindCount_;
+      return *this;
+    }
+
+    BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ )
+    {
+      pImageOpaqueBinds = pImageOpaqueBinds_;
+      return *this;
+    }
+
+    BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ )
+    {
+      imageBindCount = imageBindCount_;
+      return *this;
+    }
+
+    BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ )
+    {
+      pImageBinds = pImageBinds_;
+      return *this;
+    }
+
+    BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator const VkBindSparseInfo&() const
+    {
+      return *reinterpret_cast<const VkBindSparseInfo*>(this);
+    }
+
+    bool operator==( BindSparseInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( bufferBindCount == rhs.bufferBindCount )
+          && ( pBufferBinds == rhs.pBufferBinds )
+          && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
+          && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
+          && ( imageBindCount == rhs.imageBindCount )
+          && ( pImageBinds == rhs.pImageBinds )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( BindSparseInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    const Semaphore* pWaitSemaphores;
+    uint32_t bufferBindCount;
+    const SparseBufferMemoryBindInfo* pBufferBinds;
+    uint32_t imageOpaqueBindCount;
+    const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
+    uint32_t imageBindCount;
+    const SparseImageMemoryBindInfo* pImageBinds;
+    uint32_t signalSemaphoreCount;
+    const Semaphore* pSignalSemaphores;
+  };
+  static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
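+
+  // Editor's note: an illustrative sketch, not part of the generated header, of how
+  // application code might fill a BindSparseInfo through the fluent setters above.
+  // The names 'buffer', 'memory' and 'sparseQueue' are assumed handles created
+  // elsewhere; the Queue::bindSparse wrapper is declared later in this header.
+  //
+  //   vk::SparseMemoryBind bind = vk::SparseMemoryBind()
+  //     .setResourceOffset( 0 )
+  //     .setSize( 65536 )
+  //     .setMemory( memory )
+  //     .setMemoryOffset( 0 );
+  //   vk::SparseBufferMemoryBindInfo bufferBind( buffer, 1, &bind );
+  //   vk::BindSparseInfo info = vk::BindSparseInfo()
+  //     .setBufferBindCount( 1 )
+  //     .setPBufferBinds( &bufferBind );
+  //   sparseQueue.bindSparse( 1, &info, vk::Fence() );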
+
+  enum class PipelineStageFlagBits
+  {
+    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
+  };
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
+
+  VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
+  {
+    return PipelineStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
+  {
+    return ~( PipelineStageFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<PipelineStageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
+    };
+  };
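+
+  // Editor's note: a small illustrative sketch, not part of the generated header.
+  // Individual PipelineStageFlagBits combine into a PipelineStageFlags mask through
+  // the operator| overload defined above, e.g. for a fragment-stage barrier:
+  //
+  //   vk::PipelineStageFlags dstStages
+  //     = vk::PipelineStageFlagBits::eFragmentShader
+  //     | vk::PipelineStageFlagBits::eColorAttachmentOutput;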
+
+  enum class CommandPoolCreateFlagBits
+  {
+    eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
+  };
+
+  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
+
+  VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
+  {
+    return CommandPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
+  {
+    return ~( CommandPoolCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer)
+    };
+  };
+
+  struct CommandPoolCreateInfo
+  {
+    CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
+      : sType( StructureType::eCommandPoolCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+    {
+    }
+
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) );
+    }
+
+    CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) );
+      return *this;
+    }
+    CommandPoolCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    operator const VkCommandPoolCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkCommandPoolCreateInfo*>(this);
+    }
+
+    bool operator==( CommandPoolCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex );
+    }
+
+    bool operator!=( CommandPoolCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    CommandPoolCreateFlags flags;
+    uint32_t queueFamilyIndex;
+  };
+  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
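+
+  // Editor's note: illustrative sketch only, not part of the generated header.
+  // A pool of individually resettable command buffers can be described as follows;
+  // 'graphicsQueueFamily' is an assumed queue family index obtained elsewhere, and
+  // Device::createCommandPool is the wrapper declared later in this header.
+  //
+  //   vk::CommandPoolCreateInfo poolInfo = vk::CommandPoolCreateInfo()
+  //     .setFlags( vk::CommandPoolCreateFlagBits::eResetCommandBuffer )
+  //     .setQueueFamilyIndex( graphicsQueueFamily );
+  //   vk::CommandPool pool = device.createCommandPool( poolInfo );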
+
+  enum class CommandPoolResetFlagBits
+  {
+    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
+
+  VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
+  {
+    return CommandPoolResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
+  {
+    return ~( CommandPoolResetFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+    };
+  };
+
+  enum class CommandBufferResetFlagBits
+  {
+    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
+
+  VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
+  {
+    return CommandBufferResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
+  {
+    return ~( CommandBufferResetFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+    };
+  };
+
+  enum class SampleCountFlagBits
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
+
+  VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
+  {
+    return SampleCountFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
+  {
+    return ~( SampleCountFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SampleCountFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
+    };
+  };
+
+  struct ImageFormatProperties
+  {
+    operator const VkImageFormatProperties&() const
+    {
+      return *reinterpret_cast<const VkImageFormatProperties*>(this);
+    }
+
+    bool operator==( ImageFormatProperties const& rhs ) const
+    {
+      return ( maxExtent == rhs.maxExtent )
+          && ( maxMipLevels == rhs.maxMipLevels )
+          && ( maxArrayLayers == rhs.maxArrayLayers )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( maxResourceSize == rhs.maxResourceSize );
+    }
+
+    bool operator!=( ImageFormatProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Extent3D maxExtent;
+    uint32_t maxMipLevels;
+    uint32_t maxArrayLayers;
+    SampleCountFlags sampleCounts;
+    DeviceSize maxResourceSize;
+  };
+  static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+
+  struct ImageCreateInfo
+  {
+    ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = Extent3D(), uint32_t mipLevels_ = 0, uint32_t arrayLayers_ = 0, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, ImageLayout initialLayout_ = ImageLayout::eUndefined )
+      : sType( StructureType::eImageCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , imageType( imageType_ )
+      , format( format_ )
+      , extent( extent_ )
+      , mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , initialLayout( initialLayout_ )
+    {
+    }
+
+    ImageCreateInfo( VkImageCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageCreateInfo ) );
+    }
+
+    ImageCreateInfo& operator=( VkImageCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImageCreateInfo ) );
+      return *this;
+    }
+    ImageCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageCreateInfo& setFlags( ImageCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageCreateInfo& setImageType( ImageType imageType_ )
+    {
+      imageType = imageType_;
+      return *this;
+    }
+
+    ImageCreateInfo& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageCreateInfo& setExtent( Extent3D extent_ )
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    ImageCreateInfo& setMipLevels( uint32_t mipLevels_ )
+    {
+      mipLevels = mipLevels_;
+      return *this;
+    }
+
+    ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ )
+    {
+      arrayLayers = arrayLayers_;
+      return *this;
+    }
+
+    ImageCreateInfo& setSamples( SampleCountFlagBits samples_ )
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    ImageCreateInfo& setTiling( ImageTiling tiling_ )
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    ImageCreateInfo& setUsage( ImageUsageFlags usage_ )
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    ImageCreateInfo& setSharingMode( SharingMode sharingMode_ )
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ )
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    operator const VkImageCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkImageCreateInfo*>(this);
+    }
+
+    bool operator==( ImageCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imageType == rhs.imageType )
+          && ( format == rhs.format )
+          && ( extent == rhs.extent )
+          && ( mipLevels == rhs.mipLevels )
+          && ( arrayLayers == rhs.arrayLayers )
+          && ( samples == rhs.samples )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( initialLayout == rhs.initialLayout );
+    }
+
+    bool operator!=( ImageCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ImageCreateFlags flags;
+    ImageType imageType;
+    Format format;
+    Extent3D extent;
+    uint32_t mipLevels;
+    uint32_t arrayLayers;
+    SampleCountFlagBits samples;
+    ImageTiling tiling;
+    ImageUsageFlags usage;
+    SharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    ImageLayout initialLayout;
+  };
+  static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
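+
+  // Editor's note: illustrative sketch, not part of the generated header. A typical
+  // sampled 2D texture could be described like this; the format and usage values are
+  // assumptions chosen for the example, and 'device' is an assumed vk::Device handle.
+  //
+  //   vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo()
+  //     .setImageType( vk::ImageType::e2D )
+  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //     .setExtent( vk::Extent3D( 256, 256, 1 ) )
+  //     .setMipLevels( 1 )
+  //     .setArrayLayers( 1 )
+  //     .setSamples( vk::SampleCountFlagBits::e1 )
+  //     .setTiling( vk::ImageTiling::eOptimal )
+  //     .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
+  //     .setSharingMode( vk::SharingMode::eExclusive )
+  //     .setInitialLayout( vk::ImageLayout::eUndefined );
+  //   vk::Image image = device.createImage( imageInfo );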
+
+  struct PipelineMultisampleStateCreateInfo
+  {
+    PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = 0, float minSampleShading_ = 0, const SampleMask* pSampleMask_ = nullptr, Bool32 alphaToCoverageEnable_ = 0, Bool32 alphaToOneEnable_ = 0 )
+      : sType( StructureType::ePipelineMultisampleStateCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , rasterizationSamples( rasterizationSamples_ )
+      , sampleShadingEnable( sampleShadingEnable_ )
+      , minSampleShading( minSampleShading_ )
+      , pSampleMask( pSampleMask_ )
+      , alphaToCoverageEnable( alphaToCoverageEnable_ )
+      , alphaToOneEnable( alphaToOneEnable_ )
+    {
+    }
+
+    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
+    }
+
+    PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) );
+      return *this;
+    }
+    PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ )
+    {
+      rasterizationSamples = rasterizationSamples_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ )
+    {
+      sampleShadingEnable = sampleShadingEnable_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ )
+    {
+      minSampleShading = minSampleShading_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ )
+    {
+      pSampleMask = pSampleMask_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ )
+    {
+      alphaToCoverageEnable = alphaToCoverageEnable_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ )
+    {
+      alphaToOneEnable = alphaToOneEnable_;
+      return *this;
+    }
+
+    operator const VkPipelineMultisampleStateCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>(this);
+    }
+
+    bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( sampleShadingEnable == rhs.sampleShadingEnable )
+          && ( minSampleShading == rhs.minSampleShading )
+          && ( pSampleMask == rhs.pSampleMask )
+          && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
+          && ( alphaToOneEnable == rhs.alphaToOneEnable );
+    }
+
+    bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineMultisampleStateCreateFlags flags;
+    SampleCountFlagBits rasterizationSamples;
+    Bool32 sampleShadingEnable;
+    float minSampleShading;
+    const SampleMask* pSampleMask;
+    Bool32 alphaToCoverageEnable;
+    Bool32 alphaToOneEnable;
+  };
+  static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
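+
+  // Editor's note: illustrative sketch, not part of the generated header. With
+  // multisampling disabled the structure reduces to its defaults; enabling 4x MSAA
+  // only requires changing the sample count:
+  //
+  //   vk::PipelineMultisampleStateCreateInfo msState = vk::PipelineMultisampleStateCreateInfo()
+  //     .setRasterizationSamples( vk::SampleCountFlagBits::e4 )
+  //     .setSampleShadingEnable( VK_FALSE );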
+
+  struct GraphicsPipelineCreateInfo
+  {
+    GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), uint32_t stageCount_ = 0, const PipelineShaderStageCreateInfo* pStages_ = nullptr, const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, PipelineLayout layout_ = PipelineLayout(), RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
+      : sType( StructureType::eGraphicsPipelineCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , pVertexInputState( pVertexInputState_ )
+      , pInputAssemblyState( pInputAssemblyState_ )
+      , pTessellationState( pTessellationState_ )
+      , pViewportState( pViewportState_ )
+      , pRasterizationState( pRasterizationState_ )
+      , pMultisampleState( pMultisampleState_ )
+      , pDepthStencilState( pDepthStencilState_ )
+      , pColorBlendState( pColorBlendState_ )
+      , pDynamicState( pDynamicState_ )
+      , layout( layout_ )
+      , renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {
+    }
+
+    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) );
+    }
+
+    GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) );
+      return *this;
+    }
+    GraphicsPipelineCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ )
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ )
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ )
+    {
+      pInputAssemblyState = pInputAssemblyState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ )
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ )
+    {
+      pViewportState = pViewportState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ )
+    {
+      pRasterizationState = pRasterizationState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ )
+    {
+      pMultisampleState = pMultisampleState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ )
+    {
+      pDepthStencilState = pDepthStencilState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ )
+    {
+      pColorBlendState = pColorBlendState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ )
+    {
+      pDynamicState = pDynamicState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ )
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ )
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ )
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator const VkGraphicsPipelineCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>(this);
+    }
+
+    bool operator==( GraphicsPipelineCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( pVertexInputState == rhs.pVertexInputState )
+          && ( pInputAssemblyState == rhs.pInputAssemblyState )
+          && ( pTessellationState == rhs.pTessellationState )
+          && ( pViewportState == rhs.pViewportState )
+          && ( pRasterizationState == rhs.pRasterizationState )
+          && ( pMultisampleState == rhs.pMultisampleState )
+          && ( pDepthStencilState == rhs.pDepthStencilState )
+          && ( pColorBlendState == rhs.pColorBlendState )
+          && ( pDynamicState == rhs.pDynamicState )
+          && ( layout == rhs.layout )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineCreateFlags flags;
+    uint32_t stageCount;
+    const PipelineShaderStageCreateInfo* pStages;
+    const PipelineVertexInputStateCreateInfo* pVertexInputState;
+    const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
+    const PipelineTessellationStateCreateInfo* pTessellationState;
+    const PipelineViewportStateCreateInfo* pViewportState;
+    const PipelineRasterizationStateCreateInfo* pRasterizationState;
+    const PipelineMultisampleStateCreateInfo* pMultisampleState;
+    const PipelineDepthStencilStateCreateInfo* pDepthStencilState;
+    const PipelineColorBlendStateCreateInfo* pColorBlendState;
+    const PipelineDynamicStateCreateInfo* pDynamicState;
+    PipelineLayout layout;
+    RenderPass renderPass;
+    uint32_t subpass;
+    Pipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+  };
+  static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
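+
+  // Editor's note: illustrative sketch, not part of the generated header. The create
+  // info mostly aggregates pointers to the per-stage state structures defined above;
+  // the 'shaderStages', 'vertexInput', 'inputAssembly', 'viewportState', 'rasterState',
+  // 'msState' and 'blendState' locals and the 'pipelineLayout'/'renderPass' handles are
+  // assumed to have been set up by the application beforehand.
+  //
+  //   vk::GraphicsPipelineCreateInfo pipelineInfo = vk::GraphicsPipelineCreateInfo()
+  //     .setStageCount( 2 )
+  //     .setPStages( shaderStages )
+  //     .setPVertexInputState( &vertexInput )
+  //     .setPInputAssemblyState( &inputAssembly )
+  //     .setPViewportState( &viewportState )
+  //     .setPRasterizationState( &rasterState )
+  //     .setPMultisampleState( &msState )
+  //     .setPColorBlendState( &blendState )
+  //     .setLayout( pipelineLayout )
+  //     .setRenderPass( renderPass )
+  //     .setSubpass( 0 );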
+
+  struct PhysicalDeviceLimits
+  {
+    operator const VkPhysicalDeviceLimits&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLimits*>(this);
+    }
+
+    bool operator==( PhysicalDeviceLimits const& rhs ) const
+    {
+      return ( maxImageDimension1D == rhs.maxImageDimension1D )
+          && ( maxImageDimension2D == rhs.maxImageDimension2D )
+          && ( maxImageDimension3D == rhs.maxImageDimension3D )
+          && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
+          && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
+          && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
+          && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
+          && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
+          && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
+          && ( bufferImageGranularity == rhs.bufferImageGranularity )
+          && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
+          && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
+          && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
+          && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
+          && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
+          && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
+          && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
+          && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
+          && ( maxPerStageResources == rhs.maxPerStageResources )
+          && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
+          && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
+          && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
+          && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
+          && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
+          && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
+          && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
+          && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
+          && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
+          && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
+          && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
+          && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
+          && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
+          && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
+          && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
+          && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
+          && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
+          && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
+          && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
+          && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
+          && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
+          && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
+          && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
+          && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
+          && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
+          && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
+          && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
+          && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
+          && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
+          && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
+          && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
+          && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
+          && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
+          && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
+          && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
+          && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
+          && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
+          && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
+          && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
+          && ( maxViewports == rhs.maxViewports )
+          && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
+          && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
+          && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
+          && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
+          && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
+          && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
+          && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
+          && ( minTexelOffset == rhs.minTexelOffset )
+          && ( maxTexelOffset == rhs.maxTexelOffset )
+          && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
+          && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
+          && ( minInterpolationOffset == rhs.minInterpolationOffset )
+          && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
+          && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
+          && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
+          && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
+          && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
+          && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
+          && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
+          && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
+          && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
+          && ( maxColorAttachments == rhs.maxColorAttachments )
+          && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
+          && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
+          && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
+          && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
+          && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
+          && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
+          && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
+          && ( timestampPeriod == rhs.timestampPeriod )
+          && ( maxClipDistances == rhs.maxClipDistances )
+          && ( maxCullDistances == rhs.maxCullDistances )
+          && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
+          && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
+          && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
+          && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
+          && ( pointSizeGranularity == rhs.pointSizeGranularity )
+          && ( lineWidthGranularity == rhs.lineWidthGranularity )
+          && ( strictLines == rhs.strictLines )
+          && ( standardSampleLocations == rhs.standardSampleLocations )
+          && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
+          && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
+          && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+    }
+
+    bool operator!=( PhysicalDeviceLimits const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t maxImageDimension1D;
+    uint32_t maxImageDimension2D;
+    uint32_t maxImageDimension3D;
+    uint32_t maxImageDimensionCube;
+    uint32_t maxImageArrayLayers;
+    uint32_t maxTexelBufferElements;
+    uint32_t maxUniformBufferRange;
+    uint32_t maxStorageBufferRange;
+    uint32_t maxPushConstantsSize;
+    uint32_t maxMemoryAllocationCount;
+    uint32_t maxSamplerAllocationCount;
+    DeviceSize bufferImageGranularity;
+    DeviceSize sparseAddressSpaceSize;
+    uint32_t maxBoundDescriptorSets;
+    uint32_t maxPerStageDescriptorSamplers;
+    uint32_t maxPerStageDescriptorUniformBuffers;
+    uint32_t maxPerStageDescriptorStorageBuffers;
+    uint32_t maxPerStageDescriptorSampledImages;
+    uint32_t maxPerStageDescriptorStorageImages;
+    uint32_t maxPerStageDescriptorInputAttachments;
+    uint32_t maxPerStageResources;
+    uint32_t maxDescriptorSetSamplers;
+    uint32_t maxDescriptorSetUniformBuffers;
+    uint32_t maxDescriptorSetUniformBuffersDynamic;
+    uint32_t maxDescriptorSetStorageBuffers;
+    uint32_t maxDescriptorSetStorageBuffersDynamic;
+    uint32_t maxDescriptorSetSampledImages;
+    uint32_t maxDescriptorSetStorageImages;
+    uint32_t maxDescriptorSetInputAttachments;
+    uint32_t maxVertexInputAttributes;
+    uint32_t maxVertexInputBindings;
+    uint32_t maxVertexInputAttributeOffset;
+    uint32_t maxVertexInputBindingStride;
+    uint32_t maxVertexOutputComponents;
+    uint32_t maxTessellationGenerationLevel;
+    uint32_t maxTessellationPatchSize;
+    uint32_t maxTessellationControlPerVertexInputComponents;
+    uint32_t maxTessellationControlPerVertexOutputComponents;
+    uint32_t maxTessellationControlPerPatchOutputComponents;
+    uint32_t maxTessellationControlTotalOutputComponents;
+    uint32_t maxTessellationEvaluationInputComponents;
+    uint32_t maxTessellationEvaluationOutputComponents;
+    uint32_t maxGeometryShaderInvocations;
+    uint32_t maxGeometryInputComponents;
+    uint32_t maxGeometryOutputComponents;
+    uint32_t maxGeometryOutputVertices;
+    uint32_t maxGeometryTotalOutputComponents;
+    uint32_t maxFragmentInputComponents;
+    uint32_t maxFragmentOutputAttachments;
+    uint32_t maxFragmentDualSrcAttachments;
+    uint32_t maxFragmentCombinedOutputResources;
+    uint32_t maxComputeSharedMemorySize;
+    uint32_t maxComputeWorkGroupCount[3];
+    uint32_t maxComputeWorkGroupInvocations;
+    uint32_t maxComputeWorkGroupSize[3];
+    uint32_t subPixelPrecisionBits;
+    uint32_t subTexelPrecisionBits;
+    uint32_t mipmapPrecisionBits;
+    uint32_t maxDrawIndexedIndexValue;
+    uint32_t maxDrawIndirectCount;
+    float maxSamplerLodBias;
+    float maxSamplerAnisotropy;
+    uint32_t maxViewports;
+    uint32_t maxViewportDimensions[2];
+    float viewportBoundsRange[2];
+    uint32_t viewportSubPixelBits;
+    size_t minMemoryMapAlignment;
+    DeviceSize minTexelBufferOffsetAlignment;
+    DeviceSize minUniformBufferOffsetAlignment;
+    DeviceSize minStorageBufferOffsetAlignment;
+    int32_t minTexelOffset;
+    uint32_t maxTexelOffset;
+    int32_t minTexelGatherOffset;
+    uint32_t maxTexelGatherOffset;
+    float minInterpolationOffset;
+    float maxInterpolationOffset;
+    uint32_t subPixelInterpolationOffsetBits;
+    uint32_t maxFramebufferWidth;
+    uint32_t maxFramebufferHeight;
+    uint32_t maxFramebufferLayers;
+    SampleCountFlags framebufferColorSampleCounts;
+    SampleCountFlags framebufferDepthSampleCounts;
+    SampleCountFlags framebufferStencilSampleCounts;
+    SampleCountFlags framebufferNoAttachmentsSampleCounts;
+    uint32_t maxColorAttachments;
+    SampleCountFlags sampledImageColorSampleCounts;
+    SampleCountFlags sampledImageIntegerSampleCounts;
+    SampleCountFlags sampledImageDepthSampleCounts;
+    SampleCountFlags sampledImageStencilSampleCounts;
+    SampleCountFlags storageImageSampleCounts;
+    uint32_t maxSampleMaskWords;
+    Bool32 timestampComputeAndGraphics;
+    float timestampPeriod;
+    uint32_t maxClipDistances;
+    uint32_t maxCullDistances;
+    uint32_t maxCombinedClipAndCullDistances;
+    uint32_t discreteQueuePriorities;
+    float pointSizeRange[2];
+    float lineWidthRange[2];
+    float pointSizeGranularity;
+    float lineWidthGranularity;
+    Bool32 strictLines;
+    Bool32 standardSampleLocations;
+    DeviceSize optimalBufferCopyOffsetAlignment;
+    DeviceSize optimalBufferCopyRowPitchAlignment;
+    DeviceSize nonCoherentAtomSize;
+  };
+  static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceProperties
+  {
+    operator const VkPhysicalDeviceProperties&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties*>(this);
+    }
+
+    bool operator==( PhysicalDeviceProperties const& rhs ) const
+    {
+      return ( apiVersion == rhs.apiVersion )
+          && ( driverVersion == rhs.driverVersion )
+          && ( vendorID == rhs.vendorID )
+          && ( deviceID == rhs.deviceID )
+          && ( deviceType == rhs.deviceType )
+          && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( limits == rhs.limits )
+          && ( sparseProperties == rhs.sparseProperties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t apiVersion;
+    uint32_t driverVersion;
+    uint32_t vendorID;
+    uint32_t deviceID;
+    PhysicalDeviceType deviceType;
+    char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+    uint8_t pipelineCacheUUID[VK_UUID_SIZE];
+    PhysicalDeviceLimits limits;
+    PhysicalDeviceSparseProperties sparseProperties;
+  };
+  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
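+
+  // Editor's note: illustrative sketch, not part of the generated header. The
+  // properties (and the PhysicalDeviceLimits they embed) are typically retrieved
+  // through the PhysicalDevice::getProperties wrapper declared later in this header;
+  // 'physicalDevice' is an assumed handle enumerated from the instance.
+  //
+  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
+  //   uint32_t maxDim2D = props.limits.maxImageDimension2D;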
+
+  struct PhysicalDeviceProperties2KHR
+  {
+    operator const VkPhysicalDeviceProperties2KHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2KHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceProperties2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    PhysicalDeviceProperties properties;
+  };
+  static_assert( sizeof( PhysicalDeviceProperties2KHR ) == sizeof( VkPhysicalDeviceProperties2KHR ), "struct and wrapper have different size!" );
+
+  struct ImageFormatProperties2KHR
+  {
+    operator const VkImageFormatProperties2KHR&() const
+    {
+      return *reinterpret_cast<const VkImageFormatProperties2KHR*>(this);
+    }
+
+    bool operator==( ImageFormatProperties2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFormatProperties == rhs.imageFormatProperties );
+    }
+
+    bool operator!=( ImageFormatProperties2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    ImageFormatProperties imageFormatProperties;
+  };
+  static_assert( sizeof( ImageFormatProperties2KHR ) == sizeof( VkImageFormatProperties2KHR ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceSparseImageFormatInfo2KHR
+  {
+    PhysicalDeviceSparseImageFormatInfo2KHR( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageUsageFlags usage_ = ImageUsageFlags(), ImageTiling tiling_ = ImageTiling::eOptimal )
+      : sType( StructureType::ePhysicalDeviceSparseImageFormatInfo2KHR )
+      , pNext( nullptr )
+      , format( format_ )
+      , type( type_ )
+      , samples( samples_ )
+      , usage( usage_ )
+      , tiling( tiling_ )
+    {
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR( VkPhysicalDeviceSparseImageFormatInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2KHR ) );
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR& operator=( VkPhysicalDeviceSparseImageFormatInfo2KHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2KHR ) );
+      return *this;
+    }
+    PhysicalDeviceSparseImageFormatInfo2KHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR& setType( ImageType type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR& setSamples( SampleCountFlagBits samples_ )
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR& setUsage( ImageUsageFlags usage_ )
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2KHR& setTiling( ImageTiling tiling_ )
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceSparseImageFormatInfo2KHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSparseImageFormatInfo2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( samples == rhs.samples )
+          && ( usage == rhs.usage )
+          && ( tiling == rhs.tiling );
+    }
+
+    bool operator!=( PhysicalDeviceSparseImageFormatInfo2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Format format;
+    ImageType type;
+    SampleCountFlagBits samples;
+    ImageUsageFlags usage;
+    ImageTiling tiling;
+  };
+  static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2KHR ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2KHR ), "struct and wrapper have different size!" );
+
+  enum class AttachmentDescriptionFlagBits
+  {
+    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
+  };
+
+  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
+
+  VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
+  {
+    return AttachmentDescriptionFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
+  {
+    return ~( AttachmentDescriptionFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+    };
+  };
+
+  struct AttachmentDescription
+  {
+    AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
+      : flags( flags_ )
+      , format( format_ )
+      , samples( samples_ )
+      , loadOp( loadOp_ )
+      , storeOp( storeOp_ )
+      , stencilLoadOp( stencilLoadOp_ )
+      , stencilStoreOp( stencilStoreOp_ )
+      , initialLayout( initialLayout_ )
+      , finalLayout( finalLayout_ )
+    {
+    }
+
+    AttachmentDescription( VkAttachmentDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AttachmentDescription ) );
+    }
+
+    AttachmentDescription& operator=( VkAttachmentDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( AttachmentDescription ) );
+      return *this;
+    }
+    AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AttachmentDescription& setFormat( Format format_ )
+    {
+      format = format_;
+      return *this;
+    }
+
+    AttachmentDescription& setSamples( SampleCountFlagBits samples_ )
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ )
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ )
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ )
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ )
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+
+    operator const VkAttachmentDescription&() const
+    {
+      return *reinterpret_cast<const VkAttachmentDescription*>(this);
+    }
+
+    bool operator==( AttachmentDescription const& rhs ) const
+    {
+      return ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    AttachmentDescriptionFlags flags;
+    Format format;
+    SampleCountFlagBits samples;
+    AttachmentLoadOp loadOp;
+    AttachmentStoreOp storeOp;
+    AttachmentLoadOp stencilLoadOp;
+    AttachmentStoreOp stencilStoreOp;
+    ImageLayout initialLayout;
+    ImageLayout finalLayout;
+  };
+  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
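+
+  // Editor's note: illustrative sketch, not part of the generated header. A color
+  // attachment that is cleared at the start of the render pass and handed to the
+  // swap chain afterwards could look like this; 'swapchainFormat' is an assumed
+  // vk::Format obtained from the surface query.
+  //
+  //   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
+  //     .setFormat( swapchainFormat )
+  //     .setSamples( vk::SampleCountFlagBits::e1 )
+  //     .setLoadOp( vk::AttachmentLoadOp::eClear )
+  //     .setStoreOp( vk::AttachmentStoreOp::eStore )
+  //     .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
+  //     .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
+  //     .setInitialLayout( vk::ImageLayout::eUndefined )
+  //     .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );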
+
+  enum class StencilFaceFlagBits
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
+
+  VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
+  {
+    return StencilFaceFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
+  {
+    return ~( StencilFaceFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<StencilFaceFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
+    };
+  };
+
+  enum class DescriptorPoolCreateFlagBits
+  {
+    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
+  };
+
+  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
+
+  VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
+  {
+    return DescriptorPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
+  {
+    return ~( DescriptorPoolCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet)
+    };
+  };
+
+  struct DescriptorPoolCreateInfo
+  {
+    DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
+      : sType( StructureType::eDescriptorPoolCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , maxSets( maxSets_ )
+      , poolSizeCount( poolSizeCount_ )
+      , pPoolSizes( pPoolSizes_ )
+    {
+    }
+
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) );
+    }
+
+    DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) );
+      return *this;
+    }
+    DescriptorPoolCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ )
+    {
+      maxSets = maxSets_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ )
+    {
+      poolSizeCount = poolSizeCount_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ )
+    {
+      pPoolSizes = pPoolSizes_;
+      return *this;
+    }
+
+    operator const VkDescriptorPoolCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>(this);
+    }
+
+    bool operator==( DescriptorPoolCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( maxSets == rhs.maxSets )
+          && ( poolSizeCount == rhs.poolSizeCount )
+          && ( pPoolSizes == rhs.pPoolSizes );
+    }
+
+    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DescriptorPoolCreateFlags flags;
+    uint32_t maxSets;
+    uint32_t poolSizeCount;
+    const DescriptorPoolSize* pPoolSizes;
+  };
+  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
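+
+  // Editor's note: illustrative sketch, not part of the generated header. A small
+  // pool providing uniform-buffer descriptors for a handful of sets might be set up
+  // as follows; DescriptorPoolSize and DescriptorType are defined earlier in this
+  // header, and 'device' is an assumed vk::Device handle.
+  //
+  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
+  //   vk::DescriptorPoolCreateInfo poolInfo = vk::DescriptorPoolCreateInfo()
+  //     .setFlags( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
+  //     .setMaxSets( 16 )
+  //     .setPoolSizeCount( 1 )
+  //     .setPPoolSizes( &poolSize );
+  //   vk::DescriptorPool descriptorPool = device.createDescriptorPool( poolInfo );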
+
+  enum class DependencyFlagBits
+  {
+    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+    eViewLocalKHX = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHX,
+    eDeviceGroupKHX = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHX
+  };
+
+  using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
+
+  VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
+  {
+    return DependencyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
+  {
+    return ~( DependencyFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eViewLocalKHX) | VkFlags(DependencyFlagBits::eDeviceGroupKHX)
+    };
+  };
+
+  struct SubpassDependency
+  {
+    SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
+    {
+    }
+
+    SubpassDependency( VkSubpassDependency const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubpassDependency ) );
+    }
+
+    SubpassDependency& operator=( VkSubpassDependency const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubpassDependency ) );
+      return *this;
+    }
+    SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    operator const VkSubpassDependency&() const
+    {
+      return *reinterpret_cast<const VkSubpassDependency*>(this);
+    }
+
+    bool operator==( SubpassDependency const& rhs ) const
+    {
+      return ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags );
+    }
+
+    bool operator!=( SubpassDependency const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t srcSubpass;
+    uint32_t dstSubpass;
+    PipelineStageFlags srcStageMask;
+    PipelineStageFlags dstStageMask;
+    AccessFlags srcAccessMask;
+    AccessFlags dstAccessMask;
+    DependencyFlags dependencyFlags;
+  };
+  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
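+
+  // Usage sketch (illustrative only): a typical external-to-subpass dependency
+  // expressed with the wrappers from this header; the stage and access bits
+  // chosen below are placeholders, not a recommendation.
+  //
+  //   SubpassDependency dependency = SubpassDependency()
+  //     .setSrcSubpass( VK_SUBPASS_EXTERNAL )
+  //     .setDstSubpass( 0 )
+  //     .setSrcStageMask( PipelineStageFlagBits::eColorAttachmentOutput )
+  //     .setDstStageMask( PipelineStageFlagBits::eColorAttachmentOutput )
+  //     .setDstAccessMask( AccessFlagBits::eColorAttachmentWrite )
+  //     .setDependencyFlags( DependencyFlagBits::eByRegion );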
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT
+  };
+
+  struct SurfaceFormatKHR
+  {
+    operator const VkSurfaceFormatKHR&() const
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
+    }
+
+    bool operator==( SurfaceFormatKHR const& rhs ) const
+    {
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
+    }
+
+    bool operator!=( SurfaceFormatKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    Format format;
+    ColorSpaceKHR colorSpace;
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
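+
+  // Note (illustrative only): surface formats are reported by the
+  // implementation (e.g. via PhysicalDevice::getSurfaceFormatsKHR) rather
+  // than constructed by the application, so this wrapper only provides the
+  // conversion and comparison operators. A selection loop simply compares
+  // against the desired pair, for example:
+  //
+  //   for ( SurfaceFormatKHR const& fmt : formats )  // formats: enumerated surface formats
+  //     if ( ( fmt.format == Format::eB8G8R8A8Unorm )
+  //       && ( fmt.colorSpace == ColorSpaceKHR::eSrgbNonlinear ) )
+  //       { /* use fmt */ }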
+
+  struct SurfaceFormat2KHR
+  {
+    operator const VkSurfaceFormat2KHR&() const
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR*>(this);
+    }
+
+    bool operator==( SurfaceFormat2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceFormat == rhs.surfaceFormat );
+    }
+
+    bool operator!=( SurfaceFormat2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    SurfaceFormatKHR surfaceFormat;
+  };
+  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+
+  enum class DisplayPlaneAlphaFlagBitsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+  VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
+  {
+    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+    };
+  };
+
+  struct DisplayPlaneCapabilitiesKHR
+  {
+    operator const VkDisplayPlaneCapabilitiesKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
+    }
+
+    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
+    {
+      return ( supportedAlpha == rhs.supportedAlpha )
+          && ( minSrcPosition == rhs.minSrcPosition )
+          && ( maxSrcPosition == rhs.maxSrcPosition )
+          && ( minSrcExtent == rhs.minSrcExtent )
+          && ( maxSrcExtent == rhs.maxSrcExtent )
+          && ( minDstPosition == rhs.minDstPosition )
+          && ( maxDstPosition == rhs.maxDstPosition )
+          && ( minDstExtent == rhs.minDstExtent )
+          && ( maxDstExtent == rhs.maxDstExtent );
+    }
+
+    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayPlaneAlphaFlagsKHR supportedAlpha;
+    Offset2D minSrcPosition;
+    Offset2D maxSrcPosition;
+    Extent2D minSrcExtent;
+    Extent2D maxSrcExtent;
+    Offset2D minDstPosition;
+    Offset2D maxDstPosition;
+    Extent2D minDstExtent;
+    Extent2D maxDstExtent;
+  };
+  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+
+  enum class CompositeAlphaFlagBitsKHR
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+  VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
+  {
+    return ~( CompositeAlphaFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+    };
+  };
+
+  enum class SurfaceTransformFlagBitsKHR
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
+
+  VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
+  {
+    return ~( SurfaceTransformFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+    };
+  };
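+
+  // Note (illustrative only): FlagTraits::allFlags is what keeps the bitwise
+  // complement defined above restricted to valid bits, e.g.
+  //
+  //   SurfaceTransformFlagsKHR nonIdentity = ~SurfaceTransformFlagBitsKHR::eIdentity;
+  //
+  // yields every supported transform bit except eIdentity.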
+
+  struct DisplayPropertiesKHR
+  {
+    operator const VkDisplayPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
+    }
+
+    bool operator==( DisplayPropertiesKHR const& rhs ) const
+    {
+      return ( display == rhs.display )
+          && ( displayName == rhs.displayName )
+          && ( physicalDimensions == rhs.physicalDimensions )
+          && ( physicalResolution == rhs.physicalResolution )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( planeReorderPossible == rhs.planeReorderPossible )
+          && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    DisplayKHR display;
+    const char* displayName;
+    Extent2D physicalDimensions;
+    Extent2D physicalResolution;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    Bool32 planeReorderPossible;
+    Bool32 persistentContent;
+  };
+  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct DisplaySurfaceCreateInfoKHR
+  {
+    DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
+      : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , displayMode( displayMode_ )
+      , planeIndex( planeIndex_ )
+      , planeStackIndex( planeStackIndex_ )
+      , transform( transform_ )
+      , globalAlpha( globalAlpha_ )
+      , alphaMode( alphaMode_ )
+      , imageExtent( imageExtent_ )
+    {
+    }
+
+    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
+    }
+
+    DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) );
+      return *this;
+    }
+    DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
+    {
+      displayMode = displayMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
+    {
+      planeStackIndex = planeStackIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
+    {
+      globalAlpha = globalAlpha_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
+    {
+      alphaMode = alphaMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    operator const VkDisplaySurfaceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( displayMode == rhs.displayMode )
+          && ( planeIndex == rhs.planeIndex )
+          && ( planeStackIndex == rhs.planeStackIndex )
+          && ( transform == rhs.transform )
+          && ( globalAlpha == rhs.globalAlpha )
+          && ( alphaMode == rhs.alphaMode )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DisplaySurfaceCreateFlagsKHR flags;
+    DisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    SurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    DisplayPlaneAlphaFlagBitsKHR alphaMode;
+    Extent2D imageExtent;
+  };
+  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  struct SurfaceCapabilitiesKHR
+  {
+    operator const VkSurfaceCapabilitiesKHR&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
+    }
+
+    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    Extent2D currentExtent;
+    Extent2D minImageExtent;
+    Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    SurfaceTransformFlagBitsKHR currentTransform;
+    CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    ImageUsageFlags supportedUsageFlags;
+  };
+  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
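+
+  // Usage sketch (illustrative only): a swapchain implementation typically
+  // derives its image count from these capabilities; per the Vulkan
+  // specification, maxImageCount == 0 means there is no upper limit.
+  //
+  //   uint32_t imageCount = caps.minImageCount + 1;
+  //   if ( ( caps.maxImageCount != 0 ) && ( imageCount > caps.maxImageCount ) )
+  //     imageCount = caps.maxImageCount;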
+
+  struct SurfaceCapabilities2KHR
+  {
+    operator const VkSurfaceCapabilities2KHR&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>(this);
+    }
+
+    bool operator==( SurfaceCapabilities2KHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCapabilities == rhs.surfaceCapabilities );
+    }
+
+    bool operator!=( SurfaceCapabilities2KHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    SurfaceCapabilitiesKHR surfaceCapabilities;
+  };
+  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+
+  enum class DebugReportFlagBitsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+  VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
+  {
+    return DebugReportFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
+  {
+    return ~( DebugReportFlagsEXT( bits ) );
+  }
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+    };
+  };
+
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
+      : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , pfnCallback( pfnCallback_ )
+      , pUserData( pUserData_ )
+    {
+    }
+
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
+    }
+
+    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) );
+      return *this;
+    }
+    DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator const VkDebugReportCallbackCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
+    }
+
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportFlagsEXT flags;
+    PFN_vkDebugReportCallbackEXT pfnCallback;
+    void* pUserData;
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
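+
+  // Usage sketch (illustrative only): pfnCallback is a plain C function with
+  // the PFN_vkDebugReportCallbackEXT signature from vulkan.h; the callback
+  // name below is a placeholder.
+  //
+  //   DebugReportCallbackCreateInfoEXT debugInfo = DebugReportCallbackCreateInfoEXT()
+  //     .setFlags( DebugReportFlagBitsEXT::eError | DebugReportFlagBitsEXT::eWarning )
+  //     .setPfnCallback( &myDebugCallback )
+  //     .setPUserData( nullptr );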
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackExt = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
+    eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT
+  };
+
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
+      : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
+      , pNext( nullptr )
+      , objectType( objectType_ )
+      , object( object_ )
+      , pObjectName( pObjectName_ )
+    {
+    }
+
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
+    }
+
+    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) );
+      return *this;
+    }
+    DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    operator const VkDebugMarkerObjectNameInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
+    }
+
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    const char* pObjectName;
+  };
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
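+
+  // Usage sketch (illustrative only): debug markers attach a human-readable
+  // name to a raw Vulkan handle passed as a uint64_t; imageHandle and the
+  // name string below are placeholders.
+  //
+  //   DebugMarkerObjectNameInfoEXT nameInfo = DebugMarkerObjectNameInfoEXT()
+  //     .setObjectType( DebugReportObjectTypeEXT::eImage )
+  //     .setObject( imageHandle )            // uint64_t value of the VkImage
+  //     .setPObjectName( "Backbuffer" );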
+
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
+      : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
+      , pNext( nullptr )
+      , objectType( objectType_ )
+      , object( object_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {
+    }
+
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
+    }
+
+    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) );
+      return *this;
+    }
+    DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    operator const VkDebugMarkerObjectTagInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
+    }
+
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
+      : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
+      , pNext( nullptr )
+      , rasterizationOrder( rasterizationOrder_ )
+    {
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+      return *this;
+    }
+    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+    }
+
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RasterizationOrderAMD rasterizationOrder;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
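+
+  // Usage sketch (illustrative only): extension structures like this one are
+  // consumed through the pNext chain of the structure they extend, here
+  // PipelineRasterizationStateCreateInfo (defined earlier in this header);
+  // rasterizationState below is a placeholder for such an object.
+  //
+  //   PipelineRasterizationStateRasterizationOrderAMD rasterOrder =
+  //     PipelineRasterizationStateRasterizationOrderAMD()
+  //       .setRasterizationOrder( RasterizationOrderAMD::eRelaxed );
+  //   rasterizationState.setPNext( &rasterOrder );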
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
+  {
+    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+    };
+  };
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+      : sType( StructureType::eExternalMemoryImageCreateInfoNV )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
+    }
+
+    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) );
+      return *this;
+    }
+    ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExternalMemoryImageCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
+    }
+
+    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
+      : sType( StructureType::eExportMemoryAllocateInfoNV )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
+    }
+
+    ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) );
+      return *this;
+    }
+    ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExportMemoryAllocateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
+    }
+
+    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoNV
+  {
+    ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
+      : sType( StructureType::eImportMemoryWin32HandleInfoNV )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+      , handle( handle_ )
+    {
+    }
+
+    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
+    }
+
+    ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) );
+      return *this;
+    }
+    ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    operator const VkImportMemoryWin32HandleInfoNV&() const
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
+    }
+
+    bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsNV handleType;
+    HANDLE handle;
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  enum class ExternalMemoryFeatureFlagBitsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
+  {
+    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+    };
+  };
+
+  struct ExternalImageFormatPropertiesNV
+  {
+    operator const VkExternalImageFormatPropertiesNV&() const
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
+    }
+
+    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
+    {
+      return ( imageFormatProperties == rhs.imageFormatProperties )
+          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ImageFormatProperties imageFormatProperties;
+    ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
+    ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
+    ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+  };
+  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
+  };
+
+  struct ValidationFlagsEXT
+  {
+    ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
+      : sType( StructureType::eValidationFlagsEXT )
+      , pNext( nullptr )
+      , disabledValidationCheckCount( disabledValidationCheckCount_ )
+      , pDisabledValidationChecks( pDisabledValidationChecks_ )
+    {
+    }
+
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) );
+    }
+
+    ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) );
+      return *this;
+    }
+    ValidationFlagsEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
+    {
+      disabledValidationCheckCount = disabledValidationCheckCount_;
+      return *this;
+    }
+
+    ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
+    {
+      pDisabledValidationChecks = pDisabledValidationChecks_;
+      return *this;
+    }
+
+    operator const VkValidationFlagsEXT&() const
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
+    }
+
+    bool operator==( ValidationFlagsEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+    }
+
+    bool operator!=( ValidationFlagsEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t disabledValidationCheckCount;
+    ValidationCheckEXT* pDisabledValidationChecks;
+  };
+  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNVX
+  {
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
+    eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
+    eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+  };
+
+  using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
+
+  VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
+  {
+    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
+  {
+    return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+  }
+
+  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
+  {
+    enum
+    {
+      allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
+    };
+  };
+
+  enum class ObjectEntryUsageFlagBitsNVX
+  {
+    eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
+    eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
+  };
+
+  using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
+
+  VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
+  {
+    return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
+  {
+    return ~( ObjectEntryUsageFlagsNVX( bits ) );
+  }
+
+  template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
+  {
+    enum
+    {
+      allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
+    };
+  };
+
+  enum class IndirectCommandsTokenTypeNVX
+  {
+    ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
+    eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX,
+    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX,
+    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX,
+    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX,
+    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX,
+    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX,
+    eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX
+  };
+
+  struct IndirectCommandsTokenNVX
+  {
+    IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
+      : tokenType( tokenType_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+    {
+    }
+
+    IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) );
+    }
+
+    IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) );
+      return *this;
+    }
+    IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator const VkIndirectCommandsTokenNVX&() const
+    {
+      return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
+    }
+
+    bool operator==( IndirectCommandsTokenNVX const& rhs ) const
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    IndirectCommandsTokenTypeNVX tokenType;
+    Buffer buffer;
+    DeviceSize offset;
+  };
+  static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
+
+  struct IndirectCommandsLayoutTokenNVX
+  {
+    IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
+      : tokenType( tokenType_ )
+      , bindingUnit( bindingUnit_ )
+      , dynamicCount( dynamicCount_ )
+      , divisor( divisor_ )
+    {
+    }
+
+    IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) );
+    }
+
+    IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) );
+      return *this;
+    }
+    IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
+    {
+      bindingUnit = bindingUnit_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
+    {
+      dynamicCount = dynamicCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+    operator const VkIndirectCommandsLayoutTokenNVX&() const
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
+    }
+
+    bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( bindingUnit == rhs.bindingUnit )
+          && ( dynamicCount == rhs.dynamicCount )
+          && ( divisor == rhs.divisor );
+    }
+
+    bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    IndirectCommandsTokenTypeNVX tokenType;
+    uint32_t bindingUnit;
+    uint32_t dynamicCount;
+    uint32_t divisor;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
+
+  struct IndirectCommandsLayoutCreateInfoNVX
+  {
+    IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
+      : sType( StructureType::eIndirectCommandsLayoutCreateInfoNVX )
+      , pNext( nullptr )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , flags( flags_ )
+      , tokenCount( tokenCount_ )
+      , pTokens( pTokens_ )
+    {
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) );
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) );
+      return *this;
+    }
+    IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
+    {
+      tokenCount = tokenCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
+    {
+      pTokens = pTokens_;
+      return *this;
+    }
+
+    operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
+    }
+
+    bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( flags == rhs.flags )
+          && ( tokenCount == rhs.tokenCount )
+          && ( pTokens == rhs.pTokens );
+    }
+
+    bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineBindPoint pipelineBindPoint;
+    IndirectCommandsLayoutUsageFlagsNVX flags;
+    uint32_t tokenCount;
+    const IndirectCommandsLayoutTokenNVX* pTokens;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
+
+  enum class ObjectEntryTypeNVX
+  {
+    eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
+    ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX,
+    eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX,
+    eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX,
+    ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX
+  };
+
+  struct ObjectTableCreateInfoNVX
+  {
+    ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
+      : sType( StructureType::eObjectTableCreateInfoNVX )
+      , pNext( nullptr )
+      , objectCount( objectCount_ )
+      , pObjectEntryTypes( pObjectEntryTypes_ )
+      , pObjectEntryCounts( pObjectEntryCounts_ )
+      , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
+      , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
+      , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
+      , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
+      , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
+      , maxPipelineLayouts( maxPipelineLayouts_ )
+    {
+    }
+
+    ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) );
+    }
+
+    ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) );
+      return *this;
+    }
+    ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
+    {
+      pObjectEntryTypes = pObjectEntryTypes_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
+    {
+      pObjectEntryCounts = pObjectEntryCounts_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
+    {
+      pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
+    {
+      maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
+    {
+      maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
+    {
+      maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
+    {
+      maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
+    {
+      maxPipelineLayouts = maxPipelineLayouts_;
+      return *this;
+    }
+
+    operator const VkObjectTableCreateInfoNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
+    }
+
+    bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
+          && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
+          && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
+          && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
+          && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
+          && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
+          && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
+          && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
+    }
+
+    bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t objectCount;
+    const ObjectEntryTypeNVX* pObjectEntryTypes;
+    const uint32_t* pObjectEntryCounts;
+    const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
+    uint32_t maxUniformBuffersPerDescriptor;
+    uint32_t maxStorageBuffersPerDescriptor;
+    uint32_t maxStorageImagesPerDescriptor;
+    uint32_t maxSampledImagesPerDescriptor;
+    uint32_t maxPipelineLayouts;
+  };
+  static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableEntryNVX
+  {
+    ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
+      : type( type_ )
+      , flags( flags_ )
+    {
+    }
+
+    ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) );
+    }
+
+    ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) );
+      return *this;
+    }
+    ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator const VkObjectTableEntryNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
+    }
+
+    bool operator==( ObjectTableEntryNVX const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( ObjectTableEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+  };
+  static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTablePipelineEntryNVX
+  {
+    ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
+      : type( type_ )
+      , flags( flags_ )
+      , pipeline( pipeline_ )
+    {
+    }
+
+    ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
+    }
+
+    ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) );
+      return *this;
+    }
+    ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    operator const VkObjectTablePipelineEntryNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
+    }
+
+    bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipeline == rhs.pipeline );
+    }
+
+    bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    Pipeline pipeline;
+  };
+  static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableDescriptorSetEntryNVX
+  {
+    ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
+      : type( type_ )
+      , flags( flags_ )
+      , pipelineLayout( pipelineLayout_ )
+      , descriptorSet( descriptorSet_ )
+    {
+    }
+
+    ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
+    }
+
+    ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) );
+      return *this;
+    }
+    ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
+    {
+      descriptorSet = descriptorSet_;
+      return *this;
+    }
+
+    operator const VkObjectTableDescriptorSetEntryNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
+    }
+
+    bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( descriptorSet == rhs.descriptorSet );
+    }
+
+    bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    PipelineLayout pipelineLayout;
+    DescriptorSet descriptorSet;
+  };
+  static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableVertexBufferEntryNVX
+  {
+    ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
+      : type( type_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
+    {
+    }
+
+    ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
+    }
+
+    ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) );
+      return *this;
+    }
+    ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator const VkObjectTableVertexBufferEntryNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
+    }
+
+    bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    Buffer buffer;
+  };
+  static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTableIndexBufferEntryNVX
+  {
+    ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer(), IndexType indexType_ = IndexType::eUint16 )
+      : type( type_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
+      , indexType( indexType_ )
+    {
+    }
+
+    ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
+    }
+
+    ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) );
+      return *this;
+    }
+    ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ )
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    operator const VkObjectTableIndexBufferEntryNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
+    }
+
+    bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( indexType == rhs.indexType );
+    }
+
+    bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    Buffer buffer;
+    IndexType indexType;
+  };
+  static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
+
+  struct ObjectTablePushConstantEntryNVX
+  {
+    ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
+      : type( type_ )
+      , flags( flags_ )
+      , pipelineLayout( pipelineLayout_ )
+      , stageFlags( stageFlags_ )
+    {
+    }
+
+    ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
+    }
+
+    ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) );
+      return *this;
+    }
+    ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    operator const VkObjectTablePushConstantEntryNVX&() const
+    {
+      return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
+    }
+
+    bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( stageFlags == rhs.stageFlags );
+    }
+
+    bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ObjectEntryTypeNVX type;
+    ObjectEntryUsageFlagsNVX flags;
+    PipelineLayout pipelineLayout;
+    ShaderStageFlags stageFlags;
+  };
+  static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
+
+  enum class DescriptorSetLayoutCreateFlagBits
+  {
+    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
+  };
+
+  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
+
+  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits )
+  {
+    return ~( DescriptorSetLayoutCreateFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR)
+    };
+  };
+
+  struct DescriptorSetLayoutCreateInfo
+  {
+    DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), uint32_t bindingCount_ = 0, const DescriptorSetLayoutBinding* pBindings_ = nullptr )
+      : sType( StructureType::eDescriptorSetLayoutCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , bindingCount( bindingCount_ )
+      , pBindings( pBindings_ )
+    {
+    }
+
+    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
+    }
+
+    DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) );
+      return *this;
+    }
+    DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ )
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ )
+    {
+      pBindings = pBindings_;
+      return *this;
+    }
+
+    operator const VkDescriptorSetLayoutCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>(this);
+    }
+
+    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindings == rhs.pBindings );
+    }
+
+    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DescriptorSetLayoutCreateFlags flags;
+    uint32_t bindingCount;
+    const DescriptorSetLayoutBinding* pBindings;
+  };
+  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
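+
+  // Usage sketch (illustrative comment only, not part of the generated API):
+  // the chained setters above let a create-info be filled in builder style
+  // before being handed to the C API; `binding` is assumed to be a
+  // DescriptorSetLayoutBinding prepared by the caller, and the default `vk`
+  // namespace is assumed.
+  //
+  //   vk::DescriptorSetLayoutBinding binding;   // assumed to be configured elsewhere
+  //   vk::DescriptorSetLayoutCreateInfo layoutInfo = vk::DescriptorSetLayoutCreateInfo()
+  //     .setBindingCount( 1 )
+  //     .setPBindings( &binding );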
+
+  enum class ExternalMemoryHandleTypeFlagBitsKHR
+  {
+    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
+    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR,
+    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR,
+    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR,
+    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR
+  };
+
+  using ExternalMemoryHandleTypeFlagsKHR = Flags<ExternalMemoryHandleTypeFlagBitsKHR, VkExternalMemoryHandleTypeFlagsKHR>;
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsKHR operator|( ExternalMemoryHandleTypeFlagBitsKHR bit0, ExternalMemoryHandleTypeFlagBitsKHR bit1 )
+  {
+    return ExternalMemoryHandleTypeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsKHR operator~( ExternalMemoryHandleTypeFlagBitsKHR bits )
+  {
+    return ~( ExternalMemoryHandleTypeFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBitsKHR::eD3D12Resource)
+    };
+  };
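+
+  // Illustrative comment (a sketch, not generated code): individual
+  // ExternalMemoryHandleTypeFlagBitsKHR values combine through the operator|
+  // defined above into an ExternalMemoryHandleTypeFlagsKHR mask, e.g. for the
+  // handleTypes member of the external-memory create infos that follow.
+  //
+  //   vk::ExternalMemoryHandleTypeFlagsKHR handleTypes =
+  //       vk::ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32
+  //     | vk::ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32Kmt;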
+
+  struct PhysicalDeviceExternalImageFormatInfoKHR
+  {
+    PhysicalDeviceExternalImageFormatInfoKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType_ = ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::ePhysicalDeviceExternalImageFormatInfoKHR )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+    {
+    }
+
+    PhysicalDeviceExternalImageFormatInfoKHR( VkPhysicalDeviceExternalImageFormatInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfoKHR ) );
+    }
+
+    PhysicalDeviceExternalImageFormatInfoKHR& operator=( VkPhysicalDeviceExternalImageFormatInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfoKHR ) );
+      return *this;
+    }
+    PhysicalDeviceExternalImageFormatInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceExternalImageFormatInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfoKHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceExternalImageFormatInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalImageFormatInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfoKHR ) == sizeof( VkPhysicalDeviceExternalImageFormatInfoKHR ), "struct and wrapper have different size!" );
+
+  struct PhysicalDeviceExternalBufferInfoKHR
+  {
+    PhysicalDeviceExternalBufferInfoKHR( BufferCreateFlags flags_ = BufferCreateFlags(), BufferUsageFlags usage_ = BufferUsageFlags(), ExternalMemoryHandleTypeFlagBitsKHR handleType_ = ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::ePhysicalDeviceExternalBufferInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , usage( usage_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    PhysicalDeviceExternalBufferInfoKHR( VkPhysicalDeviceExternalBufferInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfoKHR ) );
+    }
+
+    PhysicalDeviceExternalBufferInfoKHR& operator=( VkPhysicalDeviceExternalBufferInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfoKHR ) );
+      return *this;
+    }
+    PhysicalDeviceExternalBufferInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfoKHR& setFlags( BufferCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfoKHR& setUsage( BufferUsageFlags usage_ )
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceExternalBufferInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfoKHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceExternalBufferInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalBufferInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    BufferCreateFlags flags;
+    BufferUsageFlags usage;
+    ExternalMemoryHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalBufferInfoKHR ) == sizeof( VkPhysicalDeviceExternalBufferInfoKHR ), "struct and wrapper have different size!" );
+
+  struct ExternalMemoryImageCreateInfoKHR
+  {
+    ExternalMemoryImageCreateInfoKHR( ExternalMemoryHandleTypeFlagsKHR handleTypes_ = ExternalMemoryHandleTypeFlagsKHR() )
+      : sType( StructureType::eExternalMemoryImageCreateInfoKHR )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExternalMemoryImageCreateInfoKHR( VkExternalMemoryImageCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoKHR ) );
+    }
+
+    ExternalMemoryImageCreateInfoKHR& operator=( VkExternalMemoryImageCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoKHR ) );
+      return *this;
+    }
+    ExternalMemoryImageCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoKHR& setHandleTypes( ExternalMemoryHandleTypeFlagsKHR handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExternalMemoryImageCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoKHR*>(this);
+    }
+
+    bool operator==( ExternalMemoryImageCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsKHR handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoKHR ) == sizeof( VkExternalMemoryImageCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  struct ExternalMemoryBufferCreateInfoKHR
+  {
+    ExternalMemoryBufferCreateInfoKHR( ExternalMemoryHandleTypeFlagsKHR handleTypes_ = ExternalMemoryHandleTypeFlagsKHR() )
+      : sType( StructureType::eExternalMemoryBufferCreateInfoKHR )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExternalMemoryBufferCreateInfoKHR( VkExternalMemoryBufferCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfoKHR ) );
+    }
+
+    ExternalMemoryBufferCreateInfoKHR& operator=( VkExternalMemoryBufferCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfoKHR ) );
+      return *this;
+    }
+    ExternalMemoryBufferCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfoKHR& setHandleTypes( ExternalMemoryHandleTypeFlagsKHR handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExternalMemoryBufferCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfoKHR*>(this);
+    }
+
+    bool operator==( ExternalMemoryBufferCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsKHR handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryBufferCreateInfoKHR ) == sizeof( VkExternalMemoryBufferCreateInfoKHR ), "struct and wrapper have different size!" );
+
+  struct ExportMemoryAllocateInfoKHR
+  {
+    ExportMemoryAllocateInfoKHR( ExternalMemoryHandleTypeFlagsKHR handleTypes_ = ExternalMemoryHandleTypeFlagsKHR() )
+      : sType( StructureType::eExportMemoryAllocateInfoKHR )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExportMemoryAllocateInfoKHR( VkExportMemoryAllocateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoKHR ) );
+    }
+
+    ExportMemoryAllocateInfoKHR& operator=( VkExportMemoryAllocateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoKHR ) );
+      return *this;
+    }
+    ExportMemoryAllocateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoKHR& setHandleTypes( ExternalMemoryHandleTypeFlagsKHR handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExportMemoryAllocateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoKHR*>(this);
+    }
+
+    bool operator==( ExportMemoryAllocateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagsKHR handleTypes;
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoKHR ) == sizeof( VkExportMemoryAllocateInfoKHR ), "struct and wrapper have different size!" );
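+
+  // Illustrative comment (a sketch, assuming the MemoryAllocateInfo wrapper
+  // defined earlier in this header is used for the allocation): extension
+  // structs such as ExportMemoryAllocateInfoKHR are attached to a base struct
+  // via its pNext chain rather than passed on their own.
+  //
+  //   vk::ExportMemoryAllocateInfoKHR exportInfo = vk::ExportMemoryAllocateInfoKHR()
+  //     .setHandleTypes( vk::ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd );
+  //   vk::MemoryAllocateInfo allocInfo;          // size and memory type index assumed to be set by the caller
+  //   allocInfo.setPNext( &exportInfo );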
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoKHR
+  {
+    ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType_ = ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 )
+      : sType( StructureType::eImportMemoryWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {
+    }
+
+    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
+    }
+
+    ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) );
+      return *this;
+    }
+    ImportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR& setHandle( HANDLE handle_ )
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ )
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator const VkImportMemoryWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagBitsKHR handleType;
+    HANDLE handle;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryGetWin32HandleInfoKHR
+  {
+    MemoryGetWin32HandleInfoKHR( DeviceMemory memory_ = DeviceMemory(), ExternalMemoryHandleTypeFlagBitsKHR handleType_ = ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::eMemoryGetWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , memory( memory_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
+    }
+
+    MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+    MemoryGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkMemoryGetWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceMemory memory;
+    ExternalMemoryHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportMemoryFdInfoKHR
+  {
+    ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType_ = ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd, int fd_ = 0 )
+      : sType( StructureType::eImportMemoryFdInfoKHR )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {
+    }
+
+    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) );
+    }
+
+    ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) );
+      return *this;
+    }
+    ImportMemoryFdInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR& setFd( int fd_ )
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator const VkImportMemoryFdInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>(this);
+    }
+
+    bool operator==( ImportMemoryFdInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalMemoryHandleTypeFlagBitsKHR handleType;
+    int fd;
+  };
+  static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+
+  struct MemoryGetFdInfoKHR
+  {
+    MemoryGetFdInfoKHR( DeviceMemory memory_ = DeviceMemory(), ExternalMemoryHandleTypeFlagBitsKHR handleType_ = ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::eMemoryGetFdInfoKHR )
+      , pNext( nullptr )
+      , memory( memory_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) );
+    }
+
+    MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) );
+      return *this;
+    }
+    MemoryGetFdInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkMemoryGetFdInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>(this);
+    }
+
+    bool operator==( MemoryGetFdInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( MemoryGetFdInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceMemory memory;
+    ExternalMemoryHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+
+  enum class ExternalMemoryFeatureFlagBitsKHR
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR
+  };
+
+  using ExternalMemoryFeatureFlagsKHR = Flags<ExternalMemoryFeatureFlagBitsKHR, VkExternalMemoryFeatureFlagsKHR>;
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsKHR operator|( ExternalMemoryFeatureFlagBitsKHR bit0, ExternalMemoryFeatureFlagBitsKHR bit1 )
+  {
+    return ExternalMemoryFeatureFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsKHR operator~( ExternalMemoryFeatureFlagBitsKHR bits )
+  {
+    return ~( ExternalMemoryFeatureFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsKHR::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsKHR::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsKHR::eImportable)
+    };
+  };
+
+  struct ExternalMemoryPropertiesKHR
+  {
+    operator const VkExternalMemoryPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalMemoryPropertiesKHR*>(this);
+    }
+
+    bool operator==( ExternalMemoryPropertiesKHR const& rhs ) const
+    {
+      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalMemoryPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ExternalMemoryFeatureFlagsKHR externalMemoryFeatures;
+    ExternalMemoryHandleTypeFlagsKHR exportFromImportedHandleTypes;
+    ExternalMemoryHandleTypeFlagsKHR compatibleHandleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryPropertiesKHR ) == sizeof( VkExternalMemoryPropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct ExternalImageFormatPropertiesKHR
+  {
+    operator const VkExternalImageFormatPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesKHR*>(this);
+    }
+
+    bool operator==( ExternalImageFormatPropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    ExternalMemoryPropertiesKHR externalMemoryProperties;
+  };
+  static_assert( sizeof( ExternalImageFormatPropertiesKHR ) == sizeof( VkExternalImageFormatPropertiesKHR ), "struct and wrapper have different size!" );
+
+  struct ExternalBufferPropertiesKHR
+  {
+    operator const VkExternalBufferPropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalBufferPropertiesKHR*>(this);
+    }
+
+    bool operator==( ExternalBufferPropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalBufferPropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    ExternalMemoryPropertiesKHR externalMemoryProperties;
+  };
+  static_assert( sizeof( ExternalBufferPropertiesKHR ) == sizeof( VkExternalBufferPropertiesKHR ), "struct and wrapper have different size!" );
+
+  enum class ExternalSemaphoreHandleTypeFlagBitsKHR
+  {
+    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
+    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
+    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR,
+    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR
+  };
+
+  using ExternalSemaphoreHandleTypeFlagsKHR = Flags<ExternalSemaphoreHandleTypeFlagBitsKHR, VkExternalSemaphoreHandleTypeFlagsKHR>;
+
+  VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlagsKHR operator|( ExternalSemaphoreHandleTypeFlagBitsKHR bit0, ExternalSemaphoreHandleTypeFlagBitsKHR bit1 )
+  {
+    return ExternalSemaphoreHandleTypeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlagsKHR operator~( ExternalSemaphoreHandleTypeFlagBitsKHR bits )
+  {
+    return ~( ExternalSemaphoreHandleTypeFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBitsKHR::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBitsKHR::eSyncFd)
+    };
+  };
+
+  struct PhysicalDeviceExternalSemaphoreInfoKHR
+  {
+    PhysicalDeviceExternalSemaphoreInfoKHR( ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ = ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::ePhysicalDeviceExternalSemaphoreInfoKHR )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+    {
+    }
+
+    PhysicalDeviceExternalSemaphoreInfoKHR( VkPhysicalDeviceExternalSemaphoreInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfoKHR ) );
+    }
+
+    PhysicalDeviceExternalSemaphoreInfoKHR& operator=( VkPhysicalDeviceExternalSemaphoreInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfoKHR ) );
+      return *this;
+    }
+    PhysicalDeviceExternalSemaphoreInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceExternalSemaphoreInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfoKHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceExternalSemaphoreInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalSemaphoreInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalSemaphoreHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfoKHR ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfoKHR ), "struct and wrapper have different size!" );
+
+  struct ExportSemaphoreCreateInfoKHR
+  {
+    ExportSemaphoreCreateInfoKHR( ExternalSemaphoreHandleTypeFlagsKHR handleTypes_ = ExternalSemaphoreHandleTypeFlagsKHR() )
+      : sType( StructureType::eExportSemaphoreCreateInfoKHR )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExportSemaphoreCreateInfoKHR( VkExportSemaphoreCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfoKHR ) );
+    }
+
+    ExportSemaphoreCreateInfoKHR& operator=( VkExportSemaphoreCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfoKHR ) );
+      return *this;
+    }
+    ExportSemaphoreCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfoKHR& setHandleTypes( ExternalSemaphoreHandleTypeFlagsKHR handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExportSemaphoreCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExportSemaphoreCreateInfoKHR*>(this);
+    }
+
+    bool operator==( ExportSemaphoreCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportSemaphoreCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalSemaphoreHandleTypeFlagsKHR handleTypes;
+  };
+  static_assert( sizeof( ExportSemaphoreCreateInfoKHR ) == sizeof( VkExportSemaphoreCreateInfoKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SemaphoreGetWin32HandleInfoKHR
+  {
+    SemaphoreGetWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ = ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::eSemaphoreGetWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+    SemaphoreGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkSemaphoreGetWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Semaphore semaphore;
+    ExternalSemaphoreHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct SemaphoreGetFdInfoKHR
+  {
+    SemaphoreGetFdInfoKHR( Semaphore semaphore_ = Semaphore(), ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ = ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::eSemaphoreGetFdInfoKHR )
+      , pNext( nullptr )
+      , semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
+    }
+
+    SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) );
+      return *this;
+    }
+    SemaphoreGetFdInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR& setSemaphore( Semaphore semaphore_ )
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkSemaphoreGetFdInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>(this);
+    }
+
+    bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Semaphore semaphore;
+    ExternalSemaphoreHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+
+  enum class ExternalSemaphoreFeatureFlagBitsKHR
+  {
+    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR,
+    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR
+  };
+
+  using ExternalSemaphoreFeatureFlagsKHR = Flags<ExternalSemaphoreFeatureFlagBitsKHR, VkExternalSemaphoreFeatureFlagsKHR>;
+
+  VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlagsKHR operator|( ExternalSemaphoreFeatureFlagBitsKHR bit0, ExternalSemaphoreFeatureFlagBitsKHR bit1 )
+  {
+    return ExternalSemaphoreFeatureFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlagsKHR operator~( ExternalSemaphoreFeatureFlagBitsKHR bits )
+  {
+    return ~( ExternalSemaphoreFeatureFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalSemaphoreFeatureFlagBitsKHR::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBitsKHR::eImportable)
+    };
+  };
+
+  struct ExternalSemaphorePropertiesKHR
+  {
+    operator const VkExternalSemaphorePropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalSemaphorePropertiesKHR*>(this);
+    }
+
+    bool operator==( ExternalSemaphorePropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+    }
+
+    bool operator!=( ExternalSemaphorePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    ExternalSemaphoreHandleTypeFlagsKHR exportFromImportedHandleTypes;
+    ExternalSemaphoreHandleTypeFlagsKHR compatibleHandleTypes;
+    ExternalSemaphoreFeatureFlagsKHR externalSemaphoreFeatures;
+  };
+  static_assert( sizeof( ExternalSemaphorePropertiesKHR ) == sizeof( VkExternalSemaphorePropertiesKHR ), "struct and wrapper have different size!" );
+
+  enum class SemaphoreImportFlagBitsKHR
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR
+  };
+
+  using SemaphoreImportFlagsKHR = Flags<SemaphoreImportFlagBitsKHR, VkSemaphoreImportFlagsKHR>;
+
+  VULKAN_HPP_INLINE SemaphoreImportFlagsKHR operator|( SemaphoreImportFlagBitsKHR bit0, SemaphoreImportFlagBitsKHR bit1 )
+  {
+    return SemaphoreImportFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SemaphoreImportFlagsKHR operator~( SemaphoreImportFlagBitsKHR bits )
+  {
+    return ~( SemaphoreImportFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<SemaphoreImportFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SemaphoreImportFlagBitsKHR::eTemporary)
+    };
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportSemaphoreWin32HandleInfoKHR
+  {
+    ImportSemaphoreWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), SemaphoreImportFlagsKHR flags_ = SemaphoreImportFlagsKHR(), ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ = ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 )
+      : sType( StructureType::eImportSemaphoreWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) );
+      return *this;
+    }
+    ImportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& setFlags( SemaphoreImportFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& setHandle( HANDLE handle_ )
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ )
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator const VkImportSemaphoreWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Semaphore semaphore;
+    SemaphoreImportFlagsKHR flags;
+    ExternalSemaphoreHandleTypeFlagBitsKHR handleType;
+    HANDLE handle;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportSemaphoreFdInfoKHR
+  {
+    ImportSemaphoreFdInfoKHR( Semaphore semaphore_ = Semaphore(), SemaphoreImportFlagsKHR flags_ = SemaphoreImportFlagsKHR(), ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ = ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd, int fd_ = 0 )
+      : sType( StructureType::eImportSemaphoreFdInfoKHR )
+      , pNext( nullptr )
+      , semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {
+    }
+
+    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
+    }
+
+    ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) );
+      return *this;
+    }
+    ImportSemaphoreFdInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR& setSemaphore( Semaphore semaphore_ )
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR& setFlags( SemaphoreImportFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR& setFd( int fd_ )
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator const VkImportSemaphoreFdInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>(this);
+    }
+
+    bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Semaphore semaphore;
+    SemaphoreImportFlagsKHR flags;
+    ExternalSemaphoreHandleTypeFlagBitsKHR handleType;
+    int fd;
+  };
+  static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+
+  enum class ExternalFenceHandleTypeFlagBitsKHR
+  {
+    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
+    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
+    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR
+  };
+
+  using ExternalFenceHandleTypeFlagsKHR = Flags<ExternalFenceHandleTypeFlagBitsKHR, VkExternalFenceHandleTypeFlagsKHR>;
+
+  VULKAN_HPP_INLINE ExternalFenceHandleTypeFlagsKHR operator|( ExternalFenceHandleTypeFlagBitsKHR bit0, ExternalFenceHandleTypeFlagBitsKHR bit1 )
+  {
+    return ExternalFenceHandleTypeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalFenceHandleTypeFlagsKHR operator~( ExternalFenceHandleTypeFlagBitsKHR bits )
+  {
+    return ~( ExternalFenceHandleTypeFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBitsKHR::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBitsKHR::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBitsKHR::eSyncFd)
+    };
+  };
+
+  struct PhysicalDeviceExternalFenceInfoKHR
+  {
+    PhysicalDeviceExternalFenceInfoKHR( ExternalFenceHandleTypeFlagBitsKHR handleType_ = ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::ePhysicalDeviceExternalFenceInfoKHR )
+      , pNext( nullptr )
+      , handleType( handleType_ )
+    {
+    }
+
+    PhysicalDeviceExternalFenceInfoKHR( VkPhysicalDeviceExternalFenceInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfoKHR ) );
+    }
+
+    PhysicalDeviceExternalFenceInfoKHR& operator=( VkPhysicalDeviceExternalFenceInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfoKHR ) );
+      return *this;
+    }
+    PhysicalDeviceExternalFenceInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceExternalFenceInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfoKHR*>(this);
+    }
+
+    bool operator==( PhysicalDeviceExternalFenceInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalFenceInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalFenceHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalFenceInfoKHR ) == sizeof( VkPhysicalDeviceExternalFenceInfoKHR ), "struct and wrapper have different size!" );
+
+  struct ExportFenceCreateInfoKHR
+  {
+    ExportFenceCreateInfoKHR( ExternalFenceHandleTypeFlagsKHR handleTypes_ = ExternalFenceHandleTypeFlagsKHR() )
+      : sType( StructureType::eExportFenceCreateInfoKHR )
+      , pNext( nullptr )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    ExportFenceCreateInfoKHR( VkExportFenceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportFenceCreateInfoKHR ) );
+    }
+
+    ExportFenceCreateInfoKHR& operator=( VkExportFenceCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ExportFenceCreateInfoKHR ) );
+      return *this;
+    }
+    ExportFenceCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportFenceCreateInfoKHR& setHandleTypes( ExternalFenceHandleTypeFlagsKHR handleTypes_ )
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator const VkExportFenceCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkExportFenceCreateInfoKHR*>(this);
+    }
+
+    bool operator==( ExportFenceCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportFenceCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ExternalFenceHandleTypeFlagsKHR handleTypes;
+  };
+  static_assert( sizeof( ExportFenceCreateInfoKHR ) == sizeof( VkExportFenceCreateInfoKHR ), "struct and wrapper have different size!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct FenceGetWin32HandleInfoKHR
+  {
+    FenceGetWin32HandleInfoKHR( Fence fence_ = Fence(), ExternalFenceHandleTypeFlagBitsKHR handleType_ = ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::eFenceGetWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , fence( fence_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
+    }
+
+    FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+    FenceGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR& setFence( Fence fence_ )
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkFenceGetWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Fence fence;
+    ExternalFenceHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct FenceGetFdInfoKHR
+  {
+    FenceGetFdInfoKHR( Fence fence_ = Fence(), ExternalFenceHandleTypeFlagBitsKHR handleType_ = ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd )
+      : sType( StructureType::eFenceGetFdInfoKHR )
+      , pNext( nullptr )
+      , fence( fence_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) );
+    }
+
+    FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) );
+      return *this;
+    }
+    FenceGetFdInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR& setFence( Fence fence_ )
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkFenceGetFdInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>(this);
+    }
+
+    bool operator==( FenceGetFdInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetFdInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Fence fence;
+    ExternalFenceHandleTypeFlagBitsKHR handleType;
+  };
+  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
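+
+  // Editorial sketch, not part of the generated header: FenceGetFdInfoKHR is typically
+  // filled in with the fluent setters above; the fence handle is a placeholder supplied
+  // by the application.
+  VULKAN_HPP_INLINE FenceGetFdInfoKHR exampleFenceGetFdInfo( Fence fence )
+  {
+    return FenceGetFdInfoKHR()
+      .setFence( fence )
+      .setHandleType( ExternalFenceHandleTypeFlagBitsKHR::eSyncFd );
+  }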
+
+  enum class ExternalFenceFeatureFlagBitsKHR
+  {
+    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR,
+    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR
+  };
+
+  using ExternalFenceFeatureFlagsKHR = Flags<ExternalFenceFeatureFlagBitsKHR, VkExternalFenceFeatureFlagsKHR>;
+
+  VULKAN_HPP_INLINE ExternalFenceFeatureFlagsKHR operator|( ExternalFenceFeatureFlagBitsKHR bit0, ExternalFenceFeatureFlagBitsKHR bit1 )
+  {
+    return ExternalFenceFeatureFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE ExternalFenceFeatureFlagsKHR operator~( ExternalFenceFeatureFlagBitsKHR bits )
+  {
+    return ~( ExternalFenceFeatureFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<ExternalFenceFeatureFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalFenceFeatureFlagBitsKHR::eExportable) | VkFlags(ExternalFenceFeatureFlagBitsKHR::eImportable)
+    };
+  };
+
+  struct ExternalFencePropertiesKHR
+  {
+    operator const VkExternalFencePropertiesKHR&() const
+    {
+      return *reinterpret_cast<const VkExternalFencePropertiesKHR*>(this);
+    }
+
+    bool operator==( ExternalFencePropertiesKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalFenceFeatures == rhs.externalFenceFeatures );
+    }
+
+    bool operator!=( ExternalFencePropertiesKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    ExternalFenceHandleTypeFlagsKHR exportFromImportedHandleTypes;
+    ExternalFenceHandleTypeFlagsKHR compatibleHandleTypes;
+    ExternalFenceFeatureFlagsKHR externalFenceFeatures;
+  };
+  static_assert( sizeof( ExternalFencePropertiesKHR ) == sizeof( VkExternalFencePropertiesKHR ), "struct and wrapper have different size!" );
+
+  enum class FenceImportFlagBitsKHR
+  {
+    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT_KHR
+  };
+
+  using FenceImportFlagsKHR = Flags<FenceImportFlagBitsKHR, VkFenceImportFlagsKHR>;
+
+  VULKAN_HPP_INLINE FenceImportFlagsKHR operator|( FenceImportFlagBitsKHR bit0, FenceImportFlagBitsKHR bit1 )
+  {
+    return FenceImportFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE FenceImportFlagsKHR operator~( FenceImportFlagBitsKHR bits )
+  {
+    return ~( FenceImportFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<FenceImportFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(FenceImportFlagBitsKHR::eTemporary)
+    };
+  };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportFenceWin32HandleInfoKHR
+  {
+    ImportFenceWin32HandleInfoKHR( Fence fence_ = Fence(), FenceImportFlagsKHR flags_ = FenceImportFlagsKHR(), ExternalFenceHandleTypeFlagBitsKHR handleType_ = ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 )
+      : sType( StructureType::eImportFenceWin32HandleInfoKHR )
+      , pNext( nullptr )
+      , fence( fence_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {
+    }
+
+    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
+    }
+
+    ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) );
+      return *this;
+    }
+    ImportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR& setFence( Fence fence_ )
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR& setFlags( FenceImportFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR& setHandle( HANDLE handle_ )
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ )
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator const VkImportFenceWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Fence fence;
+    FenceImportFlagsKHR flags;
+    ExternalFenceHandleTypeFlagBitsKHR handleType;
+    HANDLE handle;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportFenceFdInfoKHR
+  {
+    ImportFenceFdInfoKHR( Fence fence_ = Fence(), FenceImportFlagsKHR flags_ = FenceImportFlagsKHR(), ExternalFenceHandleTypeFlagBitsKHR handleType_ = ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd, int fd_ = 0 )
+      : sType( StructureType::eImportFenceFdInfoKHR )
+      , pNext( nullptr )
+      , fence( fence_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {
+    }
+
+    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) );
+    }
+
+    ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) );
+      return *this;
+    }
+    ImportFenceFdInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR& setFence( Fence fence_ )
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR& setFlags( FenceImportFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBitsKHR handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR& setFd( int fd_ )
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator const VkImportFenceFdInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkImportFenceFdInfoKHR*>(this);
+    }
+
+    bool operator==( ImportFenceFdInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportFenceFdInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Fence fence;
+    FenceImportFlagsKHR flags;
+    ExternalFenceHandleTypeFlagBitsKHR handleType;
+    int fd;
+  };
+  static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
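+
+  // Editorial sketch, not part of the generated header: importing a sync fd into an
+  // existing fence via the setters above; eTemporary marks the imported payload as
+  // transient, using the FenceImportFlagBitsKHR enumeration defined earlier in this
+  // section. The fence and fd are placeholders supplied by the application.
+  VULKAN_HPP_INLINE ImportFenceFdInfoKHR exampleImportFenceFdInfo( Fence fence, int fd )
+  {
+    return ImportFenceFdInfoKHR()
+      .setFence( fence )
+      .setFlags( FenceImportFlagsKHR( FenceImportFlagBitsKHR::eTemporary ) )
+      .setHandleType( ExternalFenceHandleTypeFlagBitsKHR::eSyncFd )
+      .setFd( fd );
+  }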
+
+  enum class SurfaceCounterFlagBitsEXT
+  {
+    eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
+  };
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
+
+  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
+  {
+    return ~( SurfaceCounterFlagsEXT( bits ) );
+  }
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
+    };
+  };
+
+  struct SurfaceCapabilities2EXT
+  {
+    operator const VkSurfaceCapabilities2EXT&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
+    }
+
+    bool operator==( SurfaceCapabilities2EXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags )
+          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+    }
+
+    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    Extent2D currentExtent;
+    Extent2D minImageExtent;
+    Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    SurfaceTransformFlagBitsKHR currentTransform;
+    CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    ImageUsageFlags supportedUsageFlags;
+    SurfaceCounterFlagsEXT supportedSurfaceCounters;
+  };
+  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+
+  struct SwapchainCounterCreateInfoEXT
+  {
+    SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() )
+      : sType( StructureType::eSwapchainCounterCreateInfoEXT )
+      , pNext( nullptr )
+      , surfaceCounters( surfaceCounters_ )
+    {
+    }
+
+    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
+    }
+
+    SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) );
+      return *this;
+    }
+    SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ )
+    {
+      surfaceCounters = surfaceCounters_;
+      return *this;
+    }
+
+    operator const VkSwapchainCounterCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>(this);
+    }
+
+    bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCounters == rhs.surfaceCounters );
+    }
+
+    bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SurfaceCounterFlagsEXT surfaceCounters;
+  };
+  static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class DisplayPowerStateEXT
+  {
+    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
+    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+  };
+
+  struct DisplayPowerInfoEXT
+  {
+    DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff )
+      : sType( StructureType::eDisplayPowerInfoEXT )
+      , pNext( nullptr )
+      , powerState( powerState_ )
+    {
+    }
+
+    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) );
+    }
+
+    DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) );
+      return *this;
+    }
+    DisplayPowerInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ )
+    {
+      powerState = powerState_;
+      return *this;
+    }
+
+    operator const VkDisplayPowerInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDisplayPowerInfoEXT*>(this);
+    }
+
+    bool operator==( DisplayPowerInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( powerState == rhs.powerState );
+    }
+
+    bool operator!=( DisplayPowerInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DisplayPowerStateEXT powerState;
+  };
+  static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class DeviceEventTypeEXT
+  {
+    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
+  };
+
+  struct DeviceEventInfoEXT
+  {
+    DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug )
+      : sType( StructureType::eDeviceEventInfoEXT )
+      , pNext( nullptr )
+      , deviceEvent( deviceEvent_ )
+    {
+    }
+
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) );
+    }
+
+    DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) );
+      return *this;
+    }
+    DeviceEventInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ )
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
+
+    operator const VkDeviceEventInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT*>(this);
+    }
+
+    bool operator==( DeviceEventInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceEvent == rhs.deviceEvent );
+    }
+
+    bool operator!=( DeviceEventInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceEventTypeEXT deviceEvent;
+  };
+  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class DisplayEventTypeEXT
+  {
+    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
+  };
+
+  struct DisplayEventInfoEXT
+  {
+    DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut )
+      : sType( StructureType::eDisplayEventInfoEXT )
+      , pNext( nullptr )
+      , displayEvent( displayEvent_ )
+    {
+    }
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) );
+    }
+
+    DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) );
+      return *this;
+    }
+    DisplayEventInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ )
+    {
+      displayEvent = displayEvent_;
+      return *this;
+    }
+
+    operator const VkDisplayEventInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkDisplayEventInfoEXT*>(this);
+    }
+
+    bool operator==( DisplayEventInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayEvent == rhs.displayEvent );
+    }
+
+    bool operator!=( DisplayEventInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DisplayEventTypeEXT displayEvent;
+  };
+  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class PeerMemoryFeatureFlagBitsKHX
+  {
+    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHX,
+    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHX,
+    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHX,
+    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHX
+  };
+
+  using PeerMemoryFeatureFlagsKHX = Flags<PeerMemoryFeatureFlagBitsKHX, VkPeerMemoryFeatureFlagsKHX>;
+
+  VULKAN_HPP_INLINE PeerMemoryFeatureFlagsKHX operator|( PeerMemoryFeatureFlagBitsKHX bit0, PeerMemoryFeatureFlagBitsKHX bit1 )
+  {
+    return PeerMemoryFeatureFlagsKHX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE PeerMemoryFeatureFlagsKHX operator~( PeerMemoryFeatureFlagBitsKHX bits )
+  {
+    return ~( PeerMemoryFeatureFlagsKHX( bits ) );
+  }
+
+  template <> struct FlagTraits<PeerMemoryFeatureFlagBitsKHX>
+  {
+    enum
+    {
+      allFlags = VkFlags(PeerMemoryFeatureFlagBitsKHX::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBitsKHX::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBitsKHX::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBitsKHX::eGenericDst)
+    };
+  };
+
+  enum class MemoryAllocateFlagBitsKHX
+  {
+    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHX
+  };
+
+  using MemoryAllocateFlagsKHX = Flags<MemoryAllocateFlagBitsKHX, VkMemoryAllocateFlagsKHX>;
+
+  VULKAN_HPP_INLINE MemoryAllocateFlagsKHX operator|( MemoryAllocateFlagBitsKHX bit0, MemoryAllocateFlagBitsKHX bit1 )
+  {
+    return MemoryAllocateFlagsKHX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE MemoryAllocateFlagsKHX operator~( MemoryAllocateFlagBitsKHX bits )
+  {
+    return ~( MemoryAllocateFlagsKHX( bits ) );
+  }
+
+  template <> struct FlagTraits<MemoryAllocateFlagBitsKHX>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryAllocateFlagBitsKHX::eDeviceMask)
+    };
+  };
+
+  struct MemoryAllocateFlagsInfoKHX
+  {
+    MemoryAllocateFlagsInfoKHX( MemoryAllocateFlagsKHX flags_ = MemoryAllocateFlagsKHX(), uint32_t deviceMask_ = 0 )
+      : sType( StructureType::eMemoryAllocateFlagsInfoKHX )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , deviceMask( deviceMask_ )
+    {
+    }
+
+    MemoryAllocateFlagsInfoKHX( VkMemoryAllocateFlagsInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfoKHX ) );
+    }
+
+    MemoryAllocateFlagsInfoKHX& operator=( VkMemoryAllocateFlagsInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfoKHX ) );
+      return *this;
+    }
+    MemoryAllocateFlagsInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfoKHX& setFlags( MemoryAllocateFlagsKHX flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfoKHX& setDeviceMask( uint32_t deviceMask_ )
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator const VkMemoryAllocateFlagsInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkMemoryAllocateFlagsInfoKHX*>(this);
+    }
+
+    bool operator==( MemoryAllocateFlagsInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( MemoryAllocateFlagsInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    MemoryAllocateFlagsKHX flags;
+    uint32_t deviceMask;
+  };
+  static_assert( sizeof( MemoryAllocateFlagsInfoKHX ) == sizeof( VkMemoryAllocateFlagsInfoKHX ), "struct and wrapper have different size!" );
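+
+  // Editorial sketch, not part of the generated header: MemoryAllocateFlagsInfoKHX
+  // carries a device mask for device-group allocations; the mask value here is a
+  // placeholder supplied by the caller.
+  VULKAN_HPP_INLINE MemoryAllocateFlagsInfoKHX exampleDeviceMaskAllocateFlags( uint32_t deviceMask )
+  {
+    return MemoryAllocateFlagsInfoKHX()
+      .setFlags( MemoryAllocateFlagsKHX( MemoryAllocateFlagBitsKHX::eDeviceMask ) )
+      .setDeviceMask( deviceMask );
+  }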
+
+  enum class DeviceGroupPresentModeFlagBitsKHX
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHX,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHX,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHX,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHX
+  };
+
+  using DeviceGroupPresentModeFlagsKHX = Flags<DeviceGroupPresentModeFlagBitsKHX, VkDeviceGroupPresentModeFlagsKHX>;
+
+  VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHX operator|( DeviceGroupPresentModeFlagBitsKHX bit0, DeviceGroupPresentModeFlagBitsKHX bit1 )
+  {
+    return DeviceGroupPresentModeFlagsKHX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHX operator~( DeviceGroupPresentModeFlagBitsKHX bits )
+  {
+    return ~( DeviceGroupPresentModeFlagsKHX( bits ) );
+  }
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHX>
+  {
+    enum
+    {
+      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHX::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHX::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHX::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHX::eLocalMultiDevice)
+    };
+  };
+
+  struct DeviceGroupPresentCapabilitiesKHX
+  {
+    operator const VkDeviceGroupPresentCapabilitiesKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupPresentCapabilitiesKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE_KHX * sizeof( uint32_t ) ) == 0 )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE_KHX];
+    DeviceGroupPresentModeFlagsKHX modes;
+  };
+  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHX ) == sizeof( VkDeviceGroupPresentCapabilitiesKHX ), "struct and wrapper have different size!" );
+
+  struct DeviceGroupPresentInfoKHX
+  {
+    DeviceGroupPresentInfoKHX( uint32_t swapchainCount_ = 0, const uint32_t* pDeviceMasks_ = nullptr, DeviceGroupPresentModeFlagBitsKHX mode_ = DeviceGroupPresentModeFlagBitsKHX::eLocal )
+      : sType( StructureType::eDeviceGroupPresentInfoKHX )
+      , pNext( nullptr )
+      , swapchainCount( swapchainCount_ )
+      , pDeviceMasks( pDeviceMasks_ )
+      , mode( mode_ )
+    {
+    }
+
+    DeviceGroupPresentInfoKHX( VkDeviceGroupPresentInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHX ) );
+    }
+
+    DeviceGroupPresentInfoKHX& operator=( VkDeviceGroupPresentInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupPresentInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHX& setSwapchainCount( uint32_t swapchainCount_ )
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHX& setPDeviceMasks( const uint32_t* pDeviceMasks_ )
+    {
+      pDeviceMasks = pDeviceMasks_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHX& setMode( DeviceGroupPresentModeFlagBitsKHX mode_ )
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupPresentInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupPresentInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pDeviceMasks == rhs.pDeviceMasks )
+          && ( mode == rhs.mode );
+    }
+
+    bool operator!=( DeviceGroupPresentInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t swapchainCount;
+    const uint32_t* pDeviceMasks;
+    DeviceGroupPresentModeFlagBitsKHX mode;
+  };
+  static_assert( sizeof( DeviceGroupPresentInfoKHX ) == sizeof( VkDeviceGroupPresentInfoKHX ), "struct and wrapper have different size!" );
+
+  struct DeviceGroupSwapchainCreateInfoKHX
+  {
+    DeviceGroupSwapchainCreateInfoKHX( DeviceGroupPresentModeFlagsKHX modes_ = DeviceGroupPresentModeFlagsKHX() )
+      : sType( StructureType::eDeviceGroupSwapchainCreateInfoKHX )
+      , pNext( nullptr )
+      , modes( modes_ )
+    {
+    }
+
+    DeviceGroupSwapchainCreateInfoKHX( VkDeviceGroupSwapchainCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHX ) );
+    }
+
+    DeviceGroupSwapchainCreateInfoKHX& operator=( VkDeviceGroupSwapchainCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupSwapchainCreateInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHX& setModes( DeviceGroupPresentModeFlagsKHX modes_ )
+    {
+      modes = modes_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupSwapchainCreateInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupSwapchainCreateInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    DeviceGroupPresentModeFlagsKHX modes;
+  };
+  static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHX ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHX ), "struct and wrapper have different size!" );
+
+  enum class SwapchainCreateFlagBitsKHR
+  {
+    eBindSfrKHX = VK_SWAPCHAIN_CREATE_BIND_SFR_BIT_KHX
+  };
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits )
+  {
+    return ~( SwapchainCreateFlagsKHR( bits ) );
+  }
+
+  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eBindSfrKHX)
+    };
+  };
+
+  struct SwapchainCreateInfoKHR
+  {
+    SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
+      : sType( StructureType::eSwapchainCreateInfoKHR )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {
+    }
+
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) );
+    }
+
+    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) );
+      return *this;
+    }
+    SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+    operator const VkSwapchainCreateInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
+    }
+
+    bool operator==( SwapchainCreateInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SwapchainCreateFlagsKHR flags;
+    SurfaceKHR surface;
+    uint32_t minImageCount;
+    Format imageFormat;
+    ColorSpaceKHR imageColorSpace;
+    Extent2D imageExtent;
+    uint32_t imageArrayLayers;
+    ImageUsageFlags imageUsage;
+    SharingMode imageSharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    SurfaceTransformFlagBitsKHR preTransform;
+    CompositeAlphaFlagBitsKHR compositeAlpha;
+    PresentModeKHR presentMode;
+    Bool32 clipped;
+    SwapchainKHR oldSwapchain;
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
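+
+  // Editorial sketch, not part of the generated header: a typical double-buffered
+  // swapchain description built with the setters above. The surface, format and extent
+  // are placeholders the application would derive from the surface capabilities;
+  // eColorAttachment and eFifo refer to enumerators declared earlier in this header.
+  VULKAN_HPP_INLINE SwapchainCreateInfoKHR exampleSwapchainCreateInfo( SurfaceKHR surface, Format format, Extent2D extent )
+  {
+    return SwapchainCreateInfoKHR()
+      .setSurface( surface )
+      .setMinImageCount( 2 )
+      .setImageFormat( format )
+      .setImageColorSpace( ColorSpaceKHR::eSrgbNonlinear )
+      .setImageExtent( extent )
+      .setImageArrayLayers( 1 )
+      .setImageUsage( ImageUsageFlags( ImageUsageFlagBits::eColorAttachment ) )
+      .setImageSharingMode( SharingMode::eExclusive )
+      .setPreTransform( SurfaceTransformFlagBitsKHR::eIdentity )
+      .setCompositeAlpha( CompositeAlphaFlagBitsKHR::eOpaque )
+      .setPresentMode( PresentModeKHR::eFifo )
+      .setClipped( true );
+  }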
+
+  enum class ViewportCoordinateSwizzleNV
+  {
+    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
+    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
+    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
+    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
+    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
+    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+  };
+
+  struct ViewportSwizzleNV
+  {
+    ViewportSwizzleNV( ViewportCoordinateSwizzleNV x_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV y_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV z_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV w_ = ViewportCoordinateSwizzleNV::ePositiveX )
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+      , w( w_ )
+    {
+    }
+
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) );
+    }
+
+    ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) );
+      return *this;
+    }
+    ViewportSwizzleNV& setX( ViewportCoordinateSwizzleNV x_ )
+    {
+      x = x_;
+      return *this;
+    }
+
+    ViewportSwizzleNV& setY( ViewportCoordinateSwizzleNV y_ )
+    {
+      y = y_;
+      return *this;
+    }
+
+    ViewportSwizzleNV& setZ( ViewportCoordinateSwizzleNV z_ )
+    {
+      z = z_;
+      return *this;
+    }
+
+    ViewportSwizzleNV& setW( ViewportCoordinateSwizzleNV w_ )
+    {
+      w = w_;
+      return *this;
+    }
+
+    operator const VkViewportSwizzleNV&() const
+    {
+      return *reinterpret_cast<const VkViewportSwizzleNV*>(this);
+    }
+
+    bool operator==( ViewportSwizzleNV const& rhs ) const
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z )
+          && ( w == rhs.w );
+    }
+
+    bool operator!=( ViewportSwizzleNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    ViewportCoordinateSwizzleNV x;
+    ViewportCoordinateSwizzleNV y;
+    ViewportCoordinateSwizzleNV z;
+    ViewportCoordinateSwizzleNV w;
+  };
+  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
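+
+  // Editorial sketch, not part of the generated header: an identity swizzle that passes
+  // each viewport coordinate through unchanged, using the constructor defined above.
+  VULKAN_HPP_INLINE ViewportSwizzleNV exampleIdentityViewportSwizzle()
+  {
+    return ViewportSwizzleNV( ViewportCoordinateSwizzleNV::ePositiveX,
+                              ViewportCoordinateSwizzleNV::ePositiveY,
+                              ViewportCoordinateSwizzleNV::ePositiveZ,
+                              ViewportCoordinateSwizzleNV::ePositiveW );
+  }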
+
+  struct PipelineViewportSwizzleStateCreateInfoNV
+  {
+    PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV flags_ = PipelineViewportSwizzleStateCreateFlagsNV(), uint32_t viewportCount_ = 0, const ViewportSwizzleNV* pViewportSwizzles_ = nullptr )
+      : sType( StructureType::ePipelineViewportSwizzleStateCreateInfoNV )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , viewportCount( viewportCount_ )
+      , pViewportSwizzles( pViewportSwizzles_ )
+    {
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) );
+      return *this;
+    }
+    PipelineViewportSwizzleStateCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV& setFlags( PipelineViewportSwizzleStateCreateFlagsNV flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ )
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV& setPViewportSwizzles( const ViewportSwizzleNV* pViewportSwizzles_ )
+    {
+      pViewportSwizzles = pViewportSwizzles_;
+      return *this;
+    }
+
+    operator const VkPipelineViewportSwizzleStateCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>(this);
+    }
+
+    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportSwizzles == rhs.pViewportSwizzles );
+    }
+
+    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineViewportSwizzleStateCreateFlagsNV flags;
+    uint32_t viewportCount;
+    const ViewportSwizzleNV* pViewportSwizzles;
+  };
+  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+
+  enum class DiscardRectangleModeEXT
+  {
+    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+  };
+
+  struct PipelineDiscardRectangleStateCreateInfoEXT
+  {
+    PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT flags_ = PipelineDiscardRectangleStateCreateFlagsEXT(), DiscardRectangleModeEXT discardRectangleMode_ = DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = 0, const Rect2D* pDiscardRectangles_ = nullptr )
+      : sType( StructureType::ePipelineDiscardRectangleStateCreateInfoEXT )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , discardRectangleMode( discardRectangleMode_ )
+      , discardRectangleCount( discardRectangleCount_ )
+      , pDiscardRectangles( pDiscardRectangles_ )
+    {
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) );
+      return *this;
+    }
+    PipelineDiscardRectangleStateCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& setFlags( PipelineDiscardRectangleStateCreateFlagsEXT flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleMode( DiscardRectangleModeEXT discardRectangleMode_ )
+    {
+      discardRectangleMode = discardRectangleMode_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleCount( uint32_t discardRectangleCount_ )
+    {
+      discardRectangleCount = discardRectangleCount_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& setPDiscardRectangles( const Rect2D* pDiscardRectangles_ )
+    {
+      pDiscardRectangles = pDiscardRectangles_;
+      return *this;
+    }
+
+    operator const VkPipelineDiscardRectangleStateCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>(this);
+    }
+
+    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( discardRectangleMode == rhs.discardRectangleMode )
+          && ( discardRectangleCount == rhs.discardRectangleCount )
+          && ( pDiscardRectangles == rhs.pDiscardRectangles );
+    }
+
+    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineDiscardRectangleStateCreateFlagsEXT flags;
+    DiscardRectangleModeEXT discardRectangleMode;
+    uint32_t discardRectangleCount;
+    const Rect2D* pDiscardRectangles;
+  };
+  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
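+
+  // Editorial sketch, not part of the generated header: an inclusive discard-rectangle
+  // state referencing caller-provided rectangles; Rect2D is declared earlier in this
+  // header.
+  VULKAN_HPP_INLINE PipelineDiscardRectangleStateCreateInfoEXT exampleInclusiveDiscardRectangleState( uint32_t rectangleCount, const Rect2D* pRectangles )
+  {
+    return PipelineDiscardRectangleStateCreateInfoEXT()
+      .setDiscardRectangleMode( DiscardRectangleModeEXT::eInclusive )
+      .setDiscardRectangleCount( rectangleCount )
+      .setPDiscardRectangles( pRectangles );
+  }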
+
+  enum class SubpassDescriptionFlagBits
+  {
+    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
+  };
+
+  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
+
+  VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
+  {
+    return SubpassDescriptionFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits )
+  {
+    return ~( SubpassDescriptionFlags( bits ) );
+  }
+
+  template <> struct FlagTraits<SubpassDescriptionFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX)
+    };
+  };
+
+  struct SubpassDescription
+  {
+    SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = 0, const AttachmentReference* pInputAttachments_ = nullptr, uint32_t colorAttachmentCount_ = 0, const AttachmentReference* pColorAttachments_ = nullptr, const AttachmentReference* pResolveAttachments_ = nullptr, const AttachmentReference* pDepthStencilAttachment_ = nullptr, uint32_t preserveAttachmentCount_ = 0, const uint32_t* pPreserveAttachments_ = nullptr )
+      : flags( flags_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , inputAttachmentCount( inputAttachmentCount_ )
+      , pInputAttachments( pInputAttachments_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachments( pColorAttachments_ )
+      , pResolveAttachments( pResolveAttachments_ )
+      , pDepthStencilAttachment( pDepthStencilAttachment_ )
+      , preserveAttachmentCount( preserveAttachmentCount_ )
+      , pPreserveAttachments( pPreserveAttachments_ )
+    {
+    }
+
+    SubpassDescription( VkSubpassDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubpassDescription ) );
+    }
+
+    SubpassDescription& operator=( VkSubpassDescription const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubpassDescription ) );
+      return *this;
+    }
+    SubpassDescription& setFlags( SubpassDescriptionFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ )
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+    SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ )
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+    SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ )
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+    SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ )
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+    operator const VkSubpassDescription&() const
+    {
+      return *reinterpret_cast<const VkSubpassDescription*>(this);
+    }
+
+    bool operator==( SubpassDescription const& rhs ) const
+    {
+      return ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+    SubpassDescriptionFlags flags;
+    PipelineBindPoint pipelineBindPoint;
+    uint32_t inputAttachmentCount;
+    const AttachmentReference* pInputAttachments;
+    uint32_t colorAttachmentCount;
+    const AttachmentReference* pColorAttachments;
+    const AttachmentReference* pResolveAttachments;
+    const AttachmentReference* pDepthStencilAttachment;
+    uint32_t preserveAttachmentCount;
+    const uint32_t* pPreserveAttachments;
+  };
+  static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
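+
+  // Editorial sketch, not part of the generated header: a single graphics subpass with
+  // one color attachment; the AttachmentReference is declared earlier in this header
+  // and is supplied by the caller.
+  VULKAN_HPP_INLINE SubpassDescription exampleColorOnlySubpass( const AttachmentReference* pColorAttachment )
+  {
+    return SubpassDescription()
+      .setPipelineBindPoint( PipelineBindPoint::eGraphics )
+      .setColorAttachmentCount( 1 )
+      .setPColorAttachments( pColorAttachment );
+  }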
+
+  struct RenderPassCreateInfo
+  {
+    RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
+      : sType( StructureType::eRenderPassCreateInfo )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
+    {
+    }
+
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) );
+    }
+
+    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) );
+      return *this;
+    }
+    RenderPassCreateInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+    operator const VkRenderPassCreateInfo&() const
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
+    }
+
+    bool operator==( RenderPassCreateInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies );
+    }
+
+    bool operator!=( RenderPassCreateInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    RenderPassCreateFlags flags;
+    uint32_t attachmentCount;
+    const AttachmentDescription* pAttachments;
+    uint32_t subpassCount;
+    const SubpassDescription* pSubpasses;
+    uint32_t dependencyCount;
+    const SubpassDependency* pDependencies;
+  };
+  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
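+
+  // Usage sketch (illustrative only): the setters above return *this, so a
+  // RenderPassCreateInfo can be filled by chaining, e.g.
+  //   vk::RenderPassCreateInfo info;
+  //   info.setAttachmentCount( 1 ).setPAttachments( &attachment )
+  //       .setSubpassCount( 1 ).setPSubpasses( &subpass );
+  // where 'attachment' and 'subpass' are assumed to be a filled-in
+  // AttachmentDescription and SubpassDescription.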
+
+  enum class SamplerReductionModeEXT
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT
+  };
+
+  struct SamplerReductionModeCreateInfoEXT
+  {
+    SamplerReductionModeCreateInfoEXT( SamplerReductionModeEXT reductionMode_ = SamplerReductionModeEXT::eWeightedAverage )
+      : sType( StructureType::eSamplerReductionModeCreateInfoEXT )
+      , pNext( nullptr )
+      , reductionMode( reductionMode_ )
+    {
+    }
+
+    SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) );
+    }
+
+    SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) );
+      return *this;
+    }
+    SamplerReductionModeCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT& setReductionMode( SamplerReductionModeEXT reductionMode_ )
+    {
+      reductionMode = reductionMode_;
+      return *this;
+    }
+
+    operator const VkSamplerReductionModeCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT*>(this);
+    }
+
+    bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( reductionMode == rhs.reductionMode );
+    }
+
+    bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    SamplerReductionModeEXT reductionMode;
+  };
+  static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
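+
+  // Usage sketch (illustrative only): this structure extends sampler creation
+  // through the pNext chain, e.g.
+  //   vk::SamplerReductionModeCreateInfoEXT reduction( vk::SamplerReductionModeEXT::eMax );
+  //   samplerCreateInfo.setPNext( &reduction );
+  // where 'samplerCreateInfo' is assumed to be a vk::SamplerCreateInfo.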
+
+  enum class BlendOverlapEXT
+  {
+    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
+    eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
+  };
+
+  struct PipelineColorBlendAdvancedStateCreateInfoEXT
+  {
+    PipelineColorBlendAdvancedStateCreateInfoEXT( Bool32 srcPremultiplied_ = 0, Bool32 dstPremultiplied_ = 0, BlendOverlapEXT blendOverlap_ = BlendOverlapEXT::eUncorrelated )
+      : sType( StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT )
+      , pNext( nullptr )
+      , srcPremultiplied( srcPremultiplied_ )
+      , dstPremultiplied( dstPremultiplied_ )
+      , blendOverlap( blendOverlap_ )
+    {
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) );
+      return *this;
+    }
+    PipelineColorBlendAdvancedStateCreateInfoEXT& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT& setSrcPremultiplied( Bool32 srcPremultiplied_ )
+    {
+      srcPremultiplied = srcPremultiplied_;
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT& setDstPremultiplied( Bool32 dstPremultiplied_ )
+    {
+      dstPremultiplied = dstPremultiplied_;
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT& setBlendOverlap( BlendOverlapEXT blendOverlap_ )
+    {
+      blendOverlap = blendOverlap_;
+      return *this;
+    }
+
+    operator const VkPipelineColorBlendAdvancedStateCreateInfoEXT&() const
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>(this);
+    }
+
+    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcPremultiplied == rhs.srcPremultiplied )
+          && ( dstPremultiplied == rhs.dstPremultiplied )
+          && ( blendOverlap == rhs.blendOverlap );
+    }
+
+    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    Bool32 srcPremultiplied;
+    Bool32 dstPremultiplied;
+    BlendOverlapEXT blendOverlap;
+  };
+  static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  struct PipelineCoverageModulationStateCreateInfoNV
+  {
+    PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_ = PipelineCoverageModulationStateCreateFlagsNV(), CoverageModulationModeNV coverageModulationMode_ = CoverageModulationModeNV::eNone, Bool32 coverageModulationTableEnable_ = 0, uint32_t coverageModulationTableCount_ = 0, const float* pCoverageModulationTable_ = nullptr )
+      : sType( StructureType::ePipelineCoverageModulationStateCreateInfoNV )
+      , pNext( nullptr )
+      , flags( flags_ )
+      , coverageModulationMode( coverageModulationMode_ )
+      , coverageModulationTableEnable( coverageModulationTableEnable_ )
+      , coverageModulationTableCount( coverageModulationTableCount_ )
+      , pCoverageModulationTable( pCoverageModulationTable_ )
+    {
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) );
+      return *this;
+    }
+    PipelineCoverageModulationStateCreateInfoNV& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& setFlags( PipelineCoverageModulationStateCreateFlagsNV flags_ )
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationMode( CoverageModulationModeNV coverageModulationMode_ )
+    {
+      coverageModulationMode = coverageModulationMode_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableEnable( Bool32 coverageModulationTableEnable_ )
+    {
+      coverageModulationTableEnable = coverageModulationTableEnable_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ )
+    {
+      coverageModulationTableCount = coverageModulationTableCount_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& setPCoverageModulationTable( const float* pCoverageModulationTable_ )
+    {
+      pCoverageModulationTable = pCoverageModulationTable_;
+      return *this;
+    }
+
+    operator const VkPipelineCoverageModulationStateCreateInfoNV&() const
+    {
+      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>(this);
+    }
+
+    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageModulationMode == rhs.coverageModulationMode )
+          && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
+          && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
+          && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
+    }
+
+    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    PipelineCoverageModulationStateCreateFlagsNV flags;
+    CoverageModulationModeNV coverageModulationMode;
+    Bool32 coverageModulationTableEnable;
+    uint32_t coverageModulationTableCount;
+    const float* pCoverageModulationTable;
+  };
+  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
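+
+  // Usage sketch (illustrative only): chained into the multisample state's
+  // pNext at graphics pipeline creation, e.g.
+  //   vk::PipelineCoverageModulationStateCreateInfoNV modulation;
+  //   modulation.setCoverageModulationMode( vk::CoverageModulationModeNV::eRgba );
+  //   multisampleState.setPNext( &modulation );
+  // where 'multisampleState' is assumed to be a vk::PipelineMultisampleStateCreateInfo.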
+
+  Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator = std::allocator<LayerProperties>> 
+  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties();
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
+  {
+    return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
+  {
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator = std::allocator<ExtensionProperties>> 
+  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
+  {
+    return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName )
+  {
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
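+
+  // Usage sketch (illustrative, assuming enhanced mode with exceptions enabled):
+  // the vector-returning overloads can be called directly, e.g.
+  //   std::vector<vk::LayerProperties>     layers = vk::enumerateInstanceLayerProperties();
+  //   std::vector<vk::ExtensionProperties> exts   = vk::enumerateInstanceExtensionProperties();
+  // Both report failure through createResultValue.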
+
+
+  // forward declarations
+  struct CmdProcessCommandsInfoNVX;
+
+  class CommandBuffer
+  {
+  public:
+    CommandBuffer()
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
+
+    CommandBuffer( std::nullptr_t )
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer )
+       : m_commandBuffer( commandBuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandBuffer & operator=( VkCommandBuffer commandBuffer )
+    {
+      m_commandBuffer = commandBuffer;
+      return *this;
+    }
+#endif
+
+    CommandBuffer & operator=( std::nullptr_t )
+    {
+      m_commandBuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( CommandBuffer const & rhs ) const
+    {
+      return m_commandBuffer == rhs.m_commandBuffer;
+    }
+
+    bool operator!=( CommandBuffer const & rhs ) const
+    {
+      return m_commandBuffer != rhs.m_commandBuffer;
+    }
+
+    bool operator<( CommandBuffer const & rhs ) const
+    {
+      return m_commandBuffer < rhs.m_commandBuffer;
+    }
+
+    Result begin( const CommandBufferBeginInfo* pBeginInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result end() const;
+#else
+    ResultValueType<void>::type end() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result reset( CommandBufferResetFlags flags ) const;
+#else
+    ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const;
+
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void setLineWidth( float lineWidth ) const;
+
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const;
+
+    void setBlendConstants( const float blendConstants[4] ) const;
+
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const;
+
+    void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const;
+
+    void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const;
+
+    void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const;
+
+    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const;
+
+    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const;
+
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const;
+
+    void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const;
+
+    void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const;
+
+    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const;
+
+    void dispatchIndirect( Buffer buffer, DeviceSize offset ) const;
+
+    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T>
+    void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const;
+
+    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void setEvent( Event event, PipelineStageFlags stageMask ) const;
+
+    void resetEvent( Event event, PipelineStageFlags stageMask ) const;
+
+    void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const;
+
+    void endQuery( QueryPool queryPool, uint32_t query ) const;
+
+    void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const;
+
+    void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const;
+
+    void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const;
+
+    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T>
+    void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void nextSubpass( SubpassContents contents ) const;
+
+    void endRenderPass() const;
+
+    void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void debugMarkerEndEXT() const;
+
+    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const;
+
+    void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const;
+
+    void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy<const WriteDescriptorSet> descriptorWrites ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void setDeviceMaskKHX( uint32_t deviceMask ) const;
+
+    void dispatchBaseKHX( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const;
+
+    void pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplateKHR descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData ) const;
+
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const ViewportWScalingNV> viewportWScalings ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const Rect2D> discardRectangles ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const
+    {
+      return m_commandBuffer;
+    }
+
+    explicit operator bool() const
+    {
+      return m_commandBuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_commandBuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandBuffer m_commandBuffer;
+  };
+
+  static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
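+
+  // Usage sketch (illustrative only): a typical recording sequence with this
+  // wrapper, assuming 'cmd' is a vk::CommandBuffer allocated from a command
+  // pool and 'beginInfo', 'renderPassBegin' and 'pipeline' are filled in:
+  //   cmd.begin( beginInfo );
+  //   cmd.beginRenderPass( renderPassBegin, vk::SubpassContents::eInline );
+  //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
+  //   cmd.draw( 3, 1, 0, 0 );
+  //   cmd.endRenderPass();
+  //   cmd.end();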
+
+  VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo* pBeginInfo ) const
+  {
+    return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo ) const
+  {
+    Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
+    return createResultValue( result, "vk::CommandBuffer::begin" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result CommandBuffer::end() const
+  {
+    return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end() const
+  {
+    Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
+    return createResultValue( result, "vk::CommandBuffer::end" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags ) const
+  {
+    return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( CommandBufferResetFlags flags ) const
+  {
+    Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+    return createResultValue( result, "vk::CommandBuffer::reset" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
+  {
+    vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
+  {
+    vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
+  {
+    vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
+  {
+    vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
+  {
+    vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const
+  {
+    vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
+  {
+    vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const
+  {
+    vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
+  {
+    vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
+  {
+    vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
+  {
+    vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
+  {
+    vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
+  {
+    vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
+  {
+    vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
+  {
+    vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
+  {
+    vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( buffers.size() == offsets.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+    }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+    vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+  {
+    vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
+  {
+    vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+  {
+    vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+  {
+    vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
+  {
+    vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset ) const
+  {
+    vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
+  {
+    vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
+  {
+    vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
+  {
+    vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
+  {
+    vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
+  {
+    vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
+  {
+    vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
+  {
+    vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
+  {
+    vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
+  {
+    vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
+  {
+    vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
+  {
+    vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
+  {
+    vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
+  {
+    vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
+  {
+    vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
+  {
+    vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
+  {
+    vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
+  {
+    vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
+  {
+    vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
+  {
+    vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
+  {
+    vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
+  {
+    vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask ) const
+  {
+    vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask ) const
+  {
+    vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
+  {
+    vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
+  {
+    vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
+  {
+    vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
+  {
+    vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
+  {
+    vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query ) const
+  {
+    vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+  {
+    vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
+  {
+    vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
+  {
+    vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
+  {
+    vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
+  {
+    vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
+  {
+    vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
+  {
+    vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents ) const
+  {
+    vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const
+  {
+    vkCmdEndRenderPass( m_commandBuffer );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
+  {
+    vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
+  {
+    vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+  {
+    vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo ) const
+  {
+    vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const
+  {
+    vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
+  {
+    vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo ) const
+  {
+    vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+  {
+    vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
+  {
+    vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
+  {
+    vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const
+  {
+    vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
+  {
+    vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const
+  {
+    vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites ) const
+  {
+    vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy<const WriteDescriptorSet> descriptorWrites ) const
+  {
+    vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHX( uint32_t deviceMask ) const
+  {
+    vkCmdSetDeviceMaskKHX( m_commandBuffer, deviceMask );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHX( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
+  {
+    vkCmdDispatchBaseKHX( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplateKHR descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData ) const
+  {
+    vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplateKHR>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings ) const
+  {
+    vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const ViewportWScalingNV> viewportWScalings ) const
+  {
+    vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles ) const
+  {
+    vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const Rect2D> discardRectangles ) const
+  {
+    vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
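+  // Wrapper for VkSubmitInfo with chainable set*() members. The layout matches the
+  // C struct (see the static_assert below), so instances can be handed to the C API
+  // via reinterpret_cast.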
+  struct SubmitInfo
+  {
+    SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
+      : sType( StructureType::eSubmitInfo )
+      , pNext( nullptr )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , pWaitDstStageMask( pWaitDstStageMask_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBuffers( pCommandBuffers_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphores( pSignalSemaphores_ )
+    {
+    }
+
+    SubmitInfo( VkSubmitInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubmitInfo ) );
+    }
+
+    SubmitInfo& operator=( VkSubmitInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SubmitInfo ) );
+      return *this;
+    }
+    SubmitInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
+    {
+      pWaitDstStageMask = pWaitDstStageMask_;
+      return *this;
+    }
+
+    SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
+    {
+      pCommandBuffers = pCommandBuffers_;
+      return *this;
+    }
+
+    SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator const VkSubmitInfo&() const
+    {
+      return *reinterpret_cast<const VkSubmitInfo*>(this);
+    }
+
+    bool operator==( SubmitInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBuffers == rhs.pCommandBuffers )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( SubmitInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    const Semaphore* pWaitSemaphores;
+    const PipelineStageFlags* pWaitDstStageMask;
+    uint32_t commandBufferCount;
+    const CommandBuffer* pCommandBuffers;
+    uint32_t signalSemaphoreCount;
+    const Semaphore* pSignalSemaphores;
+  };
+  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+
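+  // Non-owning handle wrapper around VkQueue. Queues are obtained from a Device and
+  // are never destroyed explicitly, so no deleter type is associated with them.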
+  class Queue
+  {
+  public:
+    Queue()
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    Queue( std::nullptr_t )
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue )
+       : m_queue( queue )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Queue & operator=(VkQueue queue)
+    {
+      m_queue = queue;
+      return *this; 
+    }
+#endif
+
+    Queue & operator=( std::nullptr_t )
+    {
+      m_queue = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Queue const & rhs ) const
+    {
+      return m_queue == rhs.m_queue;
+    }
+
+    bool operator!=(Queue const & rhs ) const
+    {
+      return m_queue != rhs.m_queue;
+    }
+
+    bool operator<(Queue const & rhs ) const
+    {
+      return m_queue < rhs.m_queue;
+    }
+
+    Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result waitIdle() const;
+#else
+    ResultValueType<void>::type waitIdle() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result presentKHR( const PresentInfoKHR* pPresentInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result presentKHR( const PresentInfoKHR & presentInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue;
+  };
+
+  static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+
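+  // Out-of-line definitions: each wrapper forwards to the corresponding C entry point;
+  // the enhanced-mode overloads additionally run the VkResult through createResultValue,
+  // which throws on unexpected result codes when exceptions are enabled.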
+  VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
+  {
+    return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
+  {
+    Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, "vk::Queue::submit" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Queue::waitIdle() const
+  {
+    return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle() const
+  {
+    Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
+    return createResultValue( result, "vk::Queue::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
+  {
+    return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
+  {
+    Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, "vk::Queue::bindSparse" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR* pPresentInfo ) const
+  {
+    return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+  }
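+  // The enhanced-mode overload still returns Result because eSuboptimalKHR is listed
+  // as an additional success code below and callers need to be able to observe it.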
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo ) const
+  {
+    Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+    return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
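+  // Forward declarations for smart-handle support: UniqueHandle<T, Deleter> owns a
+  // handle and invokes its deleter (typically the matching destroy/free call on the
+  // owning Device) when it goes out of scope.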
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class BufferDeleter;
+  using UniqueBuffer = UniqueHandle<Buffer, BufferDeleter>;
+  class BufferViewDeleter;
+  using UniqueBufferView = UniqueHandle<BufferView, BufferViewDeleter>;
+  class CommandBufferDeleter;
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer, CommandBufferDeleter>;
+  class CommandPoolDeleter;
+  using UniqueCommandPool = UniqueHandle<CommandPool, CommandPoolDeleter>;
+  class DescriptorPoolDeleter;
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool, DescriptorPoolDeleter>;
+  class DescriptorSetDeleter;
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet, DescriptorSetDeleter>;
+  class DescriptorSetLayoutDeleter;
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, DescriptorSetLayoutDeleter>;
+  class DescriptorUpdateTemplateKHRDeleter;
+  using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplateKHR, DescriptorUpdateTemplateKHRDeleter>;
+  class DeviceMemoryDeleter;
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory, DeviceMemoryDeleter>;
+  class EventDeleter;
+  using UniqueEvent = UniqueHandle<Event, EventDeleter>;
+  class FenceDeleter;
+  using UniqueFence = UniqueHandle<Fence, FenceDeleter>;
+  class FramebufferDeleter;
+  using UniqueFramebuffer = UniqueHandle<Framebuffer, FramebufferDeleter>;
+  class ImageDeleter;
+  using UniqueImage = UniqueHandle<Image, ImageDeleter>;
+  class ImageViewDeleter;
+  using UniqueImageView = UniqueHandle<ImageView, ImageViewDeleter>;
+  class IndirectCommandsLayoutNVXDeleter;
+  using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX, IndirectCommandsLayoutNVXDeleter>;
+  class ObjectTableNVXDeleter;
+  using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX, ObjectTableNVXDeleter>;
+  class PipelineDeleter;
+  using UniquePipeline = UniqueHandle<Pipeline, PipelineDeleter>;
+  class PipelineCacheDeleter;
+  using UniquePipelineCache = UniqueHandle<PipelineCache, PipelineCacheDeleter>;
+  class PipelineLayoutDeleter;
+  using UniquePipelineLayout = UniqueHandle<PipelineLayout, PipelineLayoutDeleter>;
+  class QueryPoolDeleter;
+  using UniqueQueryPool = UniqueHandle<QueryPool, QueryPoolDeleter>;
+  class RenderPassDeleter;
+  using UniqueRenderPass = UniqueHandle<RenderPass, RenderPassDeleter>;
+  class SamplerDeleter;
+  using UniqueSampler = UniqueHandle<Sampler, SamplerDeleter>;
+  class SemaphoreDeleter;
+  using UniqueSemaphore = UniqueHandle<Semaphore, SemaphoreDeleter>;
+  class ShaderModuleDeleter;
+  using UniqueShaderModule = UniqueHandle<ShaderModule, ShaderModuleDeleter>;
+  class SwapchainKHRDeleter;
+  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, SwapchainKHRDeleter>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
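+  // Handle wrapper around VkDevice. Most device-level commands are declared twice:
+  // a pointer-based variant mirroring the C API and, unless
+  // VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, an enhanced variant using references,
+  // ArrayProxy, std::vector or Unique* return types.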
+  class Device
+  {
+  public:
+    Device()
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    Device( std::nullptr_t )
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device )
+       : m_device( device )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Device & operator=(VkDevice device)
+    {
+      m_device = device;
+      return *this; 
+    }
+#endif
+
+    Device & operator=( std::nullptr_t )
+    {
+      m_device = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Device const & rhs ) const
+    {
+      return m_device == rhs.m_device;
+    }
+
+    bool operator!=(Device const & rhs ) const
+    {
+      return m_device != rhs.m_device;
+    }
+
+    bool operator<(Device const & rhs ) const
+    {
+      return m_device < rhs.m_device;
+    }
+
+    PFN_vkVoidFunction getProcAddr( const char* pName ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PFN_vkVoidFunction getProcAddr( const std::string & name ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroy( const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result waitIdle() const;
+#else
+    ResultValueType<void>::type waitIdle() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueDeviceMemory allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void*>::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void unmapMemory( DeviceMemory memory ) const;
+
+    Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    DeviceSize getMemoryCommitment( DeviceMemory memory ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    MemoryRequirements getBufferMemoryRequirements( Buffer buffer ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const;
+#else
+    ResultValueType<void>::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    MemoryRequirements getImageMemoryRequirements( Image image ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const;
+#else
+    ResultValueType<void>::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageMemoryRequirements>> 
+    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueFence createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyFence( Fence fence, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result resetFences( uint32_t fenceCount, const Fence* pFences ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getFenceStatus( Fence fence ) const;
+
+    Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSemaphore createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueEvent createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyEvent( Event event, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyEvent( Event event, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getEventStatus( Event event ) const;
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result setEvent( Event event ) const;
+#else
+    ResultValueType<void>::type setEvent( Event event ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result resetEvent( Event event ) const;
+#else
+    ResultValueType<void>::type resetEvent( Event event ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueQueryPool createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T>
+    Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueBuffer createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueBufferView createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueImage createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyImage( Image image, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyImage( Image image, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueImageView createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueShaderModule createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniquePipelineCache createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<uint8_t>> 
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<Pipeline>> 
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+    ResultValueType<Pipeline>::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Allocator = std::allocator<Pipeline>> 
+    std::vector<UniquePipeline> createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+    UniquePipeline createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<Pipeline>> 
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+    ResultValueType<Pipeline>::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Allocator = std::allocator<Pipeline>> 
+    std::vector<UniquePipeline> createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+    UniquePipeline createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniquePipelineLayout createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSampler createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueDescriptorSetLayout createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueDescriptorPool createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const;
+#else
+    ResultValueType<void>::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DescriptorSet>> 
+    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Allocator = std::allocator<DescriptorSet>> 
+    std::vector<UniqueDescriptorSet> allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueFramebuffer createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueRenderPass createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Extent2D getRenderAreaGranularity( RenderPass renderPass ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueCommandPool createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const;
+#else
+    ResultValueType<void>::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<CommandBuffer>> 
+    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Allocator = std::allocator<CommandBuffer>> 
+    std::vector<UniqueCommandBuffer> allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SwapchainKHR>> 
+    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+    ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Allocator = std::allocator<SwapchainKHR>> 
+    std::vector<UniqueSwapchainKHR> createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+    UniqueSwapchainKHR createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSwapchainKHR createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<Image>> 
+    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValue<uint32_t> acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
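+    // External memory/semaphore/fence interop: the Win32 variants below are declared
+    // only when VK_USE_PLATFORM_WIN32_KHR is defined, while the fd-based variants are
+    // declared unconditionally.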
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<HANDLE>::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueIndirectCommandsLayoutNVX createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueObjectTableNVX createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags ) const;
+#else
+    void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags = CommandPoolTrimFlagsKHR() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    Result getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, int fd ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    Result getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    Result getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, const AllocationCallbacks & allocator ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Fence>::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, const AllocationCallbacks & allocator ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValue<uint64_t> getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getGroupPeerMemoryFeaturesKHX( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PeerMemoryFeatureFlagsKHX getGroupPeerMemoryFeaturesKHX( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result bindBufferMemory2KHX( uint32_t bindInfoCount, const BindBufferMemoryInfoKHX* pBindInfos ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type bindBufferMemory2KHX( ArrayProxy<const BindBufferMemoryInfoKHX> bindInfos ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result bindImageMemory2KHX( uint32_t bindInfoCount, const BindImageMemoryInfoKHX* pBindInfos ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<void>::type bindImageMemory2KHX( ArrayProxy<const BindImageMemoryInfoKHX> bindInfos ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getGroupPresentCapabilitiesKHX( DeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DeviceGroupPresentCapabilitiesKHX>::type getGroupPresentCapabilitiesKHX() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getGroupSurfacePresentModesKHX( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHX* pModes ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DeviceGroupPresentModeFlagsKHX>::type getGroupSurfacePresentModesKHX( SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result acquireNextImage2KHX( const AcquireNextImageInfoKHX* pAcquireInfo, uint32_t* pImageIndex ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValue<uint32_t> acquireNextImage2KHX( const AcquireNextImageInfoKHX & acquireInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DescriptorUpdateTemplateKHR>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueDescriptorUpdateTemplateKHR createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplateKHR descriptorUpdateTemplate, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplateKHR descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData ) const;
+
+    void setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSwapchainStatusKHR( SwapchainKHR swapchain ) const;
+
+    Result getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PastPresentationTimingGOOGLE>>
+    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2KHR* pInfo, MemoryRequirements2KHR* pMemoryRequirements ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    MemoryRequirements2KHR getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2KHR & info ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2KHR* pInfo, MemoryRequirements2KHR* pMemoryRequirements ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    MemoryRequirements2KHR getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2KHR & info ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2KHR* pSparseMemoryRequirements ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2KHR>>
+    std::vector<SparseImageMemoryRequirements2KHR,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2KHR & info ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const
+    {
+      return m_device;
+    }
+
+    explicit operator bool() const
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDevice m_device;
+  };
+
+  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+
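+  // The deleter classes below back the Unique* smart handles: each one stores
+  // the owning vk::Device plus either an optional allocator or the pool the
+  // object was taken from, and its operator() forwards to the matching
+  // destroy/free call. A minimal sketch of pairing a deleter with a handle by
+  // hand (assuming a valid `device` and `bufferCreateInfo`; createBufferUnique
+  // further below does the same thing internally):
+  //
+  //   vk::BufferDeleter deleter( device );
+  //   vk::UniqueBuffer  buffer( device.createBuffer( bufferCreateInfo ), deleter );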
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class BufferDeleter
+  {
+  public:
+    BufferDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Buffer buffer )
+    {
+      m_device.destroyBuffer( buffer, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class BufferViewDeleter
+  {
+  public:
+    BufferViewDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( BufferView bufferView )
+    {
+      m_device.destroyBufferView( bufferView, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class CommandBufferDeleter
+  {
+  public:
+    CommandBufferDeleter( Device device = Device(), CommandPool commandPool = CommandPool() )
+      : m_device( device )
+      , m_commandPool( commandPool )
+    {}
+
+    void operator()( CommandBuffer commandBuffer )
+    {
+      m_device.freeCommandBuffers( m_commandPool, commandBuffer );
+    }
+
+  private:
+    Device m_device;
+    CommandPool m_commandPool;
+  };
+
+  class CommandPoolDeleter
+  {
+  public:
+    CommandPoolDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( CommandPool commandPool )
+    {
+      m_device.destroyCommandPool( commandPool, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class DescriptorPoolDeleter
+  {
+  public:
+    DescriptorPoolDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( DescriptorPool descriptorPool )
+    {
+      m_device.destroyDescriptorPool( descriptorPool, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class DescriptorSetDeleter
+  {
+  public:
+    DescriptorSetDeleter( Device device = Device(), DescriptorPool descriptorPool = DescriptorPool() )
+      : m_device( device )
+      , m_descriptorPool( descriptorPool )
+    {}
+
+    void operator()( DescriptorSet descriptorSet )
+    {
+      m_device.freeDescriptorSets( m_descriptorPool, descriptorSet );
+    }
+
+  private:
+    Device m_device;
+    DescriptorPool m_descriptorPool;
+  };
+
+  class DescriptorSetLayoutDeleter
+  {
+  public:
+    DescriptorSetLayoutDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( DescriptorSetLayout descriptorSetLayout )
+    {
+      m_device.destroyDescriptorSetLayout( descriptorSetLayout, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class DescriptorUpdateTemplateKHRDeleter
+  {
+  public:
+    DescriptorUpdateTemplateKHRDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( DescriptorUpdateTemplateKHR descriptorUpdateTemplateKHR )
+    {
+      m_device.destroyDescriptorUpdateTemplateKHR( descriptorUpdateTemplateKHR, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class DeviceMemoryDeleter
+  {
+  public:
+    DeviceMemoryDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( DeviceMemory deviceMemory )
+    {
+      m_device.freeMemory( deviceMemory, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class EventDeleter
+  {
+  public:
+    EventDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Event event )
+    {
+      m_device.destroyEvent( event, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class FenceDeleter
+  {
+  public:
+    FenceDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Fence fence )
+    {
+      m_device.destroyFence( fence, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class FramebufferDeleter
+  {
+  public:
+    FramebufferDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Framebuffer framebuffer )
+    {
+      m_device.destroyFramebuffer( framebuffer, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class ImageDeleter
+  {
+  public:
+    ImageDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Image image )
+    {
+      m_device.destroyImage( image, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class ImageViewDeleter
+  {
+  public:
+    ImageViewDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( ImageView imageView )
+    {
+      m_device.destroyImageView( imageView, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class IndirectCommandsLayoutNVXDeleter
+  {
+  public:
+    IndirectCommandsLayoutNVXDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( IndirectCommandsLayoutNVX indirectCommandsLayoutNVX )
+    {
+      m_device.destroyIndirectCommandsLayoutNVX( indirectCommandsLayoutNVX, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class ObjectTableNVXDeleter
+  {
+  public:
+    ObjectTableNVXDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( ObjectTableNVX objectTableNVX )
+    {
+      m_device.destroyObjectTableNVX( objectTableNVX, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class PipelineDeleter
+  {
+  public:
+    PipelineDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Pipeline pipeline )
+    {
+      m_device.destroyPipeline( pipeline, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class PipelineCacheDeleter
+  {
+  public:
+    PipelineCacheDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( PipelineCache pipelineCache )
+    {
+      m_device.destroyPipelineCache( pipelineCache, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class PipelineLayoutDeleter
+  {
+  public:
+    PipelineLayoutDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( PipelineLayout pipelineLayout )
+    {
+      m_device.destroyPipelineLayout( pipelineLayout, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class QueryPoolDeleter
+  {
+  public:
+    QueryPoolDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( QueryPool queryPool )
+    {
+      m_device.destroyQueryPool( queryPool, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class RenderPassDeleter
+  {
+  public:
+    RenderPassDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( RenderPass renderPass )
+    {
+      m_device.destroyRenderPass( renderPass, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class SamplerDeleter
+  {
+  public:
+    SamplerDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Sampler sampler )
+    {
+      m_device.destroySampler( sampler, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class SemaphoreDeleter
+  {
+  public:
+    SemaphoreDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( Semaphore semaphore )
+    {
+      m_device.destroySemaphore( semaphore, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class ShaderModuleDeleter
+  {
+  public:
+    ShaderModuleDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( ShaderModule shaderModule )
+    {
+      m_device.destroyShaderModule( shaderModule, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class SwapchainKHRDeleter
+  {
+  public:
+    SwapchainKHRDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_device( device )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( SwapchainKHR swapchainKHR )
+    {
+      m_device.destroySwapchainKHR( swapchainKHR, m_allocator );
+    }
+
+  private:
+    Device m_device;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
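+  // Inline definitions of the vk::Device members declared above follow. A
+  // hedged enhanced-mode usage sketch (assuming exceptions are enabled and
+  // `device` is a valid vk::Device):
+  //
+  //   device.waitIdle();                                         // throws on failure
+  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
+  //   device.destroyFence( fence );                              // allocator defaults to nullptr
+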
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName ) const
+  {
+    return vkGetDeviceProcAddr( m_device, pName );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const
+  {
+    return vkGetDeviceProcAddr( m_device, name.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue ) const
+  {
+    vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
+  {
+    Queue queue;
+    vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
+    return queue;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::waitIdle() const
+  {
+    return static_cast<Result>( vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle() const
+  {
+    Result result = static_cast<Result>( vkDeviceWaitIdle( m_device ) );
+    return createResultValue( result, "vk::Device::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory ) const
+  {
+    return static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DeviceMemory memory;
+    Result result = static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+    return createResultValue( result, memory, "vk::Device::allocateMemory" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueDeviceMemory Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DeviceMemoryDeleter deleter( *this, allocator );
+    return UniqueDeviceMemory( allocateMemory( allocateInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator ) const
+  {
+    vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData ) const
+  {
+    return static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void*>::type Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags ) const
+  {
+    void* pData;
+    Result result = static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), &pData ) );
+    return createResultValue( result, pData, "vk::Device::mapMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory ) const
+  {
+    vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
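+
+  // A sketch of the usual map / write / unmap sequence built from the calls
+  // above (assumes `memory` was allocated from a host-visible, host-coherent
+  // heap and is at least `dataSize` bytes large; for non-coherent memory the
+  // flushMappedMemoryRanges overloads below would be needed as well):
+  //
+  //   void* dst = device.mapMemory( memory, 0, dataSize );
+  //   std::memcpy( dst, srcData, dataSize );
+  //   device.unmapMemory( memory );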
+
+  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
+  {
+    return static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
+  {
+    Result result = static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+    return createResultValue( result, "vk::Device::flushMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
+  {
+    return static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
+  {
+    Result result = static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+    return createResultValue( result, "vk::Device::invalidateMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes ) const
+  {
+    vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), pCommittedMemoryInBytes );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory ) const
+  {
+    DeviceSize committedMemoryInBytes;
+    vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), &committedMemoryInBytes );
+    return committedMemoryInBytes;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements ) const
+  {
+    vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer ) const
+  {
+    MemoryRequirements memoryRequirements;
+    vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
+  {
+    return static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
+  {
+    Result result = static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+    return createResultValue( result, "vk::Device::bindBufferMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements ) const
+  {
+    vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image ) const
+  {
+    MemoryRequirements memoryRequirements;
+    vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
+  {
+    return static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
+  {
+    Result result = static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
+    return createResultValue( result, "vk::Device::bindImageMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements ) const
+  {
+    vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( Image image ) const
+  {
+    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
+  {
+    return static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Fence fence;
+    Result result = static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, "vk::Device::createFence" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueFence Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    FenceDeleter deleter( *this, allocator );
+    return UniqueFence( createFence( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence* pFences ) const
+  {
+    return static_cast<Result>( vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetFences( ArrayProxy<const Fence> fences ) const
+  {
+    Result result = static_cast<Result>( vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
+    return createResultValue( result, "vk::Device::resetFences" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence ) const
+  {
+    return static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence ) const
+  {
+    Result result = static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, "vk::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout ) const
+  {
+    return static_cast<Result>( vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), waitAll, timeout ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout ) const
+  {
+    Result result = static_cast<Result>( vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), waitAll, timeout ) );
+    return createResultValue( result, "vk::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
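+
+  // Sketch: the ArrayProxy overload above also accepts a single fence directly
+  // (assumes `fence` is a valid vk::Fence; the timeout is given in nanoseconds):
+  //
+  //   vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );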
+
+  VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore ) const
+  {
+    return static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Semaphore semaphore;
+    Result result = static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+    return createResultValue( result, semaphore, "vk::Device::createSemaphore" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSemaphore Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SemaphoreDeleter deleter( *this, allocator );
+    return UniqueSemaphore( createSemaphore( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent ) const
+  {
+    return static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Event event;
+    Result result = static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+    return createResultValue( result, event, "vk::Device::createEvent" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueEvent Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    EventDeleter deleter( *this, allocator );
+    return UniqueEvent( createEvent( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::getEventStatus( Event event ) const
+  {
+    return static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE Result Device::getEventStatus( Event event ) const
+  {
+    Result result = static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, "vk::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::setEvent( Event event ) const
+  {
+    return static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setEvent( Event event ) const
+  {
+    Result result = static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, "vk::Device::setEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::resetEvent( Event event ) const
+  {
+    return static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetEvent( Event event ) const
+  {
+    Result result = static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, "vk::Device::resetEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool ) const
+  {
+    return static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    QueryPool queryPool;
+    Result result = static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+    return createResultValue( result, queryPool, "vk::Device::createQueryPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueQueryPool Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    QueryPoolDeleter deleter( *this, allocator );
+    return UniqueQueryPool( createQueryPool( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags ) const
+  {
+    return static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T>
+  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags ) const
+  {
+    Result result = static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride, static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result, "vk::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer ) const
+  {
+    return static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Buffer buffer;
+    Result result = static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+    return createResultValue( result, buffer, "vk::Device::createBuffer" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueBuffer Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    BufferDeleter deleter( *this, allocator );
+    return UniqueBuffer( createBuffer( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView ) const
+  {
+    return static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    BufferView view;
+    Result result = static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+    return createResultValue( result, view, "vk::Device::createBufferView" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueBufferView Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    BufferViewDeleter deleter( *this, allocator );
+    return UniqueBufferView( createBufferView( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage ) const
+  {
+    return static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Image image;
+    Result result = static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+    return createResultValue( result, image, "vk::Device::createImage" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueImage Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ImageDeleter deleter( *this, allocator );
+    return UniqueImage( createImage( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout ) const
+  {
+    vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, const ImageSubresource & subresource ) const
+  {
+    SubresourceLayout layout;
+    vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
+    return layout;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView ) const
+  {
+    return static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ImageView view;
+    Result result = static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+    return createResultValue( result, view, "vk::Device::createImageView" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueImageView Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ImageViewDeleter deleter( *this, allocator );
+    return UniqueImageView( createImageView( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule ) const
+  {
+    return static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ShaderModule shaderModule;
+    Result result = static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+    return createResultValue( result, shaderModule, "vk::Device::createShaderModule" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueShaderModule Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ShaderModuleDeleter deleter( *this, allocator );
+    return UniqueShaderModule( createShaderModule( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache ) const
+  {
+    return static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineCache pipelineCache;
+    Result result = static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+    return createResultValue( result, pipelineCache, "vk::Device::createPipelineCache" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniquePipelineCache Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineCacheDeleter deleter( *this, allocator );
+    return UniquePipelineCache( createPipelineCache( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData ) const
+  {
+    return static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( PipelineCache pipelineCache ) const
+  {
+    std::vector<uint8_t,Allocator> data;
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( dataSize <= data.size() );
+    data.resize( dataSize );
+    return createResultValue( result, data, "vk::Device::getPipelineCacheData" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
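+
+  // Usage sketch for the enhanced getPipelineCacheData path (assumes a valid
+  // vk::Device `device` and vk::PipelineCache `cache`):
+  //   std::vector<uint8_t> blob = device.getPipelineCacheData(cache);
+  //   // `blob` can later be fed back through PipelineCacheCreateInfo::pInitialData.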
+
+  VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches ) const
+  {
+    return static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches ) const
+  {
+    Result result = static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
+    return createResultValue( result, "vk::Device::mergePipelineCaches" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
+  {
+    return static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, "vk::Device::createGraphicsPipelines" );
+  }
+  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, "vk::Device::createGraphicsPipeline" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<UniquePipeline> Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineDeleter deleter( *this, allocator );
+    std::vector<Pipeline,Allocator> pipelines = createGraphicsPipelines( pipelineCache, createInfos, allocator );
+    std::vector<UniquePipeline> uniquePipelines;
+    uniquePipelines.reserve( pipelines.size() );
+    for ( auto pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniquePipeline( pipeline, deleter ) );
+    }
+    return uniquePipelines;
+  }
+  VULKAN_HPP_INLINE UniquePipeline Device::createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineDeleter deleter( *this, allocator );
+    return UniquePipeline( createGraphicsPipeline( pipelineCache, createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
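+
+  // Usage sketch for the single-pipeline helper (assumes a valid vk::Device
+  // `device`, a filled vk::GraphicsPipelineCreateInfo `info`, and a
+  // vk::PipelineCache `cache`, which may be a null handle):
+  //   vk::Pipeline pipeline = device.createGraphicsPipeline(cache, info);
+  //   // ... use the pipeline, then:
+  //   device.destroyPipeline(pipeline);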
+
+  VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
+  {
+    return static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, "vk::Device::createComputePipelines" );
+  }
+  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, "vk::Device::createComputePipeline" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<UniquePipeline> Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineDeleter deleter( *this, allocator );
+    std::vector<Pipeline,Allocator> pipelines = createComputePipelines( pipelineCache, createInfos, allocator );
+    std::vector<UniquePipeline> uniquePipelines;
+    uniquePipelines.reserve( pipelines.size() );
+    for ( auto pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniquePipeline( pipeline, deleter ) );
+    }
+    return uniquePipelines;
+  }
+  VULKAN_HPP_INLINE UniquePipeline Device::createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineDeleter deleter( *this, allocator );
+    return UniquePipeline( createComputePipeline( pipelineCache, createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout ) const
+  {
+    return static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+    return createResultValue( result, pipelineLayout, "vk::Device::createPipelineLayout" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniquePipelineLayout Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    PipelineLayoutDeleter deleter( *this, allocator );
+    return UniquePipelineLayout( createPipelineLayout( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler ) const
+  {
+    return static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Sampler sampler;
+    Result result = static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+    return createResultValue( result, sampler, "vk::Device::createSampler" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSampler Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SamplerDeleter deleter( *this, allocator );
+    return UniqueSampler( createSampler( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout ) const
+  {
+    return static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+    return createResultValue( result, setLayout, "vk::Device::createDescriptorSetLayout" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueDescriptorSetLayout Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DescriptorSetLayoutDeleter deleter( *this, allocator );
+    return UniqueDescriptorSetLayout( createDescriptorSetLayout( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool ) const
+  {
+    return static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+    return createResultValue( result, descriptorPool, "vk::Device::createDescriptorPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueDescriptorPool Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DescriptorPoolDeleter deleter( *this, allocator );
+    return UniqueDescriptorPool( createDescriptorPool( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const
+  {
+    return static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const
+  {
+    Result result = static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+    return createResultValue( result, "vk::Device::resetDescriptorPool" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets ) const
+  {
+    return static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo ) const
+  {
+    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
+    Result result = static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, "vk::Device::allocateDescriptorSets" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<UniqueDescriptorSet> Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo ) const
+  {
+    DescriptorSetDeleter deleter( *this, allocateInfo.descriptorPool );
+    std::vector<DescriptorSet,Allocator> descriptorSets = allocateDescriptorSets( allocateInfo );
+    std::vector<UniqueDescriptorSet> uniqueDescriptorSets;
+    uniqueDescriptorSets.reserve( descriptorSets.size() );
+    for ( auto descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueDescriptorSet( descriptorSet, deleter ) );
+    }
+    return uniqueDescriptorSets;
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
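+
+  // Usage sketch for the enhanced allocateDescriptorSets path (assumes a valid
+  // vk::Device `device`, a vk::DescriptorPool `pool` and a
+  // vk::DescriptorSetLayout `layout`):
+  //   vk::DescriptorSetAllocateInfo info;
+  //   info.descriptorPool     = pool;
+  //   info.descriptorSetCount = 1;
+  //   info.pSetLayouts        = &layout;
+  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets(info);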
+
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets ) const
+  {
+    return static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets ) const
+  {
+    Result result = static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, "vk::Device::freeDescriptorSets" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies ) const
+  {
+    vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies ) const
+  {
+    vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
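+
+  // Usage sketch for the ArrayProxy-based updateDescriptorSets overload (assumes
+  // a valid vk::Device `device` and a filled vk::WriteDescriptorSet `write`);
+  // ArrayProxy accepts a single element, a std::vector or nullptr directly:
+  //   device.updateDescriptorSets(write, nullptr);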
+
+  VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer ) const
+  {
+    return static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Framebuffer framebuffer;
+    Result result = static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+    return createResultValue( result, framebuffer, "vk::Device::createFramebuffer" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueFramebuffer Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    FramebufferDeleter deleter( *this, allocator );
+    return UniqueFramebuffer( createFramebuffer( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass ) const
+  {
+    return static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    RenderPass renderPass;
+    Result result = static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+    return createResultValue( result, renderPass, "vk::Device::createRenderPass" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueRenderPass Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    RenderPassDeleter deleter( *this, allocator );
+    return UniqueRenderPass( createRenderPass( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity ) const
+  {
+    vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass ) const
+  {
+    Extent2D granularity;
+    vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
+    return granularity;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool ) const
+  {
+    return static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    CommandPool commandPool;
+    Result result = static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+    return createResultValue( result, commandPool, "vk::Device::createCommandPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueCommandPool Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    CommandPoolDeleter deleter( *this, allocator );
+    return UniqueCommandPool( createCommandPool( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
+  {
+    return static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
+  {
+    Result result = static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+    return createResultValue( result, "vk::Device::resetCommandPool" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers ) const
+  {
+    return static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo ) const
+  {
+    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
+    Result result = static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, "vk::Device::allocateCommandBuffers" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<UniqueCommandBuffer> Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo ) const
+  {
+    CommandBufferDeleter deleter( *this, allocateInfo.commandPool );
+    std::vector<CommandBuffer,Allocator> commandBuffers = allocateCommandBuffers( allocateInfo );
+    std::vector<UniqueCommandBuffer> uniqueCommandBuffers;
+    uniqueCommandBuffers.reserve( commandBuffers.size() );
+    for ( auto commandBuffer : commandBuffers )
+    {
+      uniqueCommandBuffers.push_back( UniqueCommandBuffer( commandBuffer, deleter ) );
+    }
+    return uniqueCommandBuffers;
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
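+
+  // Usage sketch for the enhanced allocateCommandBuffers path (assumes a valid
+  // vk::Device `device` and vk::CommandPool `pool`):
+  //   vk::CommandBufferAllocateInfo info(pool, vk::CommandBufferLevel::ePrimary, 1);
+  //   std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers(info);
+  //   // ... record and submit, then:
+  //   device.freeCommandBuffers(pool, buffers);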
+
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
+  {
+    vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers ) const
+  {
+    vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains ) const
+  {
+    return static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator ) const
+  {
+    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
+    Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, "vk::Device::createSharedSwapchainsKHR" );
+  }
+  VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, "vk::Device::createSharedSwapchainKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<UniqueSwapchainKHR> Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SwapchainKHRDeleter deleter( *this, allocator );
+    std::vector<SwapchainKHR,Allocator> swapchainKHRs = createSharedSwapchainsKHR( createInfos, allocator );
+    std::vector<UniqueSwapchainKHR> uniqueSwapchainKHRs;
+    uniqueSwapchainKHRs.reserve( swapchainKHRs.size() );
+    for ( auto swapchainKHR : swapchainKHRs )
+    {
+      uniqueSwapchainKHRs.push_back( UniqueSwapchainKHR( swapchainKHR, deleter ) );
+    }
+    return uniqueSwapchainKHRs;
+  }
+  VULKAN_HPP_INLINE UniqueSwapchainKHR Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SwapchainKHRDeleter deleter( *this, allocator );
+    return UniqueSwapchainKHR( createSharedSwapchainKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain ) const
+  {
+    return static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, "vk::Device::createSwapchainKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSwapchainKHR Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SwapchainKHRDeleter deleter( *this, allocator );
+    return UniqueSwapchainKHR( createSwapchainKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages ) const
+  {
+    return static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain ) const
+  {
+    std::vector<Image,Allocator> swapchainImages;
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( swapchainImageCount <= swapchainImages.size() );
+    swapchainImages.resize( swapchainImageCount );
+    return createResultValue( result, swapchainImages, "vk::Device::getSwapchainImagesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
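+
+  // Usage sketch for the enhanced getSwapchainImagesKHR path (assumes a valid
+  // vk::Device `device` and vk::SwapchainKHR `swapchain`):
+  //   std::vector<vk::Image> images = device.getSwapchainImagesKHR(swapchain);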
+
+  VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex ) const
+  {
+    return static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
+    return createResultValue( result, imageIndex, "vk::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
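+
+  // Usage sketch for the enhanced acquireNextImageKHR path (assumes a valid
+  // vk::Device `device`, vk::SwapchainKHR `swapchain` and vk::Semaphore
+  // `acquireSem`); note that this overload returns a ResultValue instead of
+  // throwing, since eTimeout, eNotReady and eSuboptimalKHR count as success here:
+  //   vk::ResultValue<uint32_t> acquired =
+  //     device.acquireNextImageKHR(swapchain, UINT64_MAX, acquireSem, vk::Fence());
+  //   uint32_t imageIndex = acquired.value;  // inspect acquired.result as needed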
+
+  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo ) const
+  {
+    return static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo ) const
+  {
+    Result result = static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
+    return createResultValue( result, "vk::Device::debugMarkerSetObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo ) const
+  {
+    return static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo ) const
+  {
+    Result result = static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
+    return createResultValue( result, "vk::Device::debugMarkerSetObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const
+  {
+    return static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
+    return createResultValue( result, handle, "vk::Device::getMemoryWin32HandleNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
+  {
+    return static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    IndirectCommandsLayoutNVX indirectCommandsLayout;
+    Result result = static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+    return createResultValue( result, indirectCommandsLayout, "vk::Device::createIndirectCommandsLayoutNVX" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueIndirectCommandsLayoutNVX Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    IndirectCommandsLayoutNVXDeleter deleter( *this, allocator );
+    return UniqueIndirectCommandsLayoutNVX( createIndirectCommandsLayoutNVX( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const
+  {
+    return static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ObjectTableNVX objectTable;
+    Result result = static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+    return createResultValue( result, objectTable, "vk::Device::createObjectTableNVX" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueObjectTableNVX Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    ObjectTableNVXDeleter deleter( *this, allocator );
+    return UniqueObjectTableNVX( createObjectTableNVX( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
+  {
+    return static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( pObjectTableEntries.size() == objectIndices.size() );
+#else
+    if ( pObjectTableEntries.size() != objectIndices.size() )
+    {
+      throw LogicError( "vk::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
+    }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+    Result result = static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
+    return createResultValue( result, "vk::Device::registerObjectsNVX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
+  {
+    return static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( objectEntryTypes.size() == objectIndices.size() );
+#else
+    if ( objectEntryTypes.size() != objectIndices.size() )
+    {
+      throw LogicError( "vk::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+    }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+    Result result = static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+    return createResultValue( result, "vk::Device::unregisterObjectsNVX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags ) const
+  {
+    vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlagsKHR>( flags ) );
+  }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const
+  {
+    return static_cast<Result>( vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, "vk::Device::getMemoryWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const
+  {
+    return static_cast<Result>( vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle ) const
+  {
+    MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    Result result = static_cast<Result>( vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
+    return createResultValue( result, memoryWin32HandleProperties, "vk::Device::getMemoryWin32HandlePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const
+  {
+    return static_cast<Result>( vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, "vk::Device::getMemoryFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties ) const
+  {
+    return static_cast<Result>( vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBitsKHR handleType, int fd ) const
+  {
+    MemoryFdPropertiesKHR memoryFdProperties;
+    Result result = static_cast<Result>( vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
+    return createResultValue( result, memoryFdProperties, "vk::Device::getMemoryFdPropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const
+  {
+    return static_cast<Result>( vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, "vk::Device::getSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const
+  {
+    return static_cast<Result>( vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const
+  {
+    Result result = static_cast<Result>( vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
+    return createResultValue( result, "vk::Device::importSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
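+  // VK_KHR_external_semaphore_fd: export and import semaphore payloads as POSIX file descriptors.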
+  VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const
+  {
+    return static_cast<Result>( vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, "vk::Device::getSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const
+  {
+    return static_cast<Result>( vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( pImportSemaphoreFdInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const
+  {
+    Result result = static_cast<Result>( vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
+    return createResultValue( result, "vk::Device::importSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const
+  {
+    return static_cast<Result>( vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, "vk::Device::getFenceWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const
+  {
+    return static_cast<Result>( vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const
+  {
+    Result result = static_cast<Result>( vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
+    return createResultValue( result, "vk::Device::importFenceWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
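+  // VK_KHR_external_fence_fd: export and import fence payloads as POSIX file descriptors.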
+  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const
+  {
+    return static_cast<Result>( vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, "vk::Device::getFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo ) const
+  {
+    return static_cast<Result>( vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo ) const
+  {
+    Result result = static_cast<Result>( vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
+    return createResultValue( result, "vk::Device::importFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
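+  // VK_EXT_display_control: display power control and fences signalled by device or display events.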
+  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo ) const
+  {
+    return static_cast<Result>( vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo ) const
+  {
+    Result result = static_cast<Result>( vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
+    return createResultValue( result, "vk::Device::displayPowerControlEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
+  {
+    return static_cast<Result>( vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, const AllocationCallbacks & allocator ) const
+  {
+    Fence fence;
+    Result result = static_cast<Result>( vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( &allocator ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, "vk::Device::registerEventEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
+  {
+    return static_cast<Result>( vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, const AllocationCallbacks & allocator ) const
+  {
+    Fence fence;
+    Result result = static_cast<Result>( vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( &allocator ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, "vk::Device::registerDisplayEventEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const
+  {
+    return static_cast<Result>( vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValue<uint64_t> Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter ) const
+  {
+    uint64_t counterValue;
+    Result result = static_cast<Result>( vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
+    return createResultValue( result, counterValue, "vk::Device::getSwapchainCounterEXT", { Result::eSuccess, Result::eErrorDeviceLost, Result::eErrorOutOfDateKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
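+  // VK_KHX_device_group: peer memory features, extended memory binding and presentation across a device group.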
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHX( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures ) const
+  {
+    vkGetDeviceGroupPeerMemoryFeaturesKHX( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlagsKHX*>( pPeerMemoryFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PeerMemoryFeatureFlagsKHX Device::getGroupPeerMemoryFeaturesKHX( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const
+  {
+    PeerMemoryFeatureFlagsKHX peerMemoryFeatures;
+    vkGetDeviceGroupPeerMemoryFeaturesKHX( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlagsKHX*>( &peerMemoryFeatures ) );
+    return peerMemoryFeatures;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHX( uint32_t bindInfoCount, const BindBufferMemoryInfoKHX* pBindInfos ) const
+  {
+    return static_cast<Result>( vkBindBufferMemory2KHX( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfoKHX*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHX( ArrayProxy<const BindBufferMemoryInfoKHX> bindInfos ) const
+  {
+    Result result = static_cast<Result>( vkBindBufferMemory2KHX( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfoKHX*>( bindInfos.data() ) ) );
+    return createResultValue( result, "vk::Device::bindBufferMemory2KHX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2KHX( uint32_t bindInfoCount, const BindImageMemoryInfoKHX* pBindInfos ) const
+  {
+    return static_cast<Result>( vkBindImageMemory2KHX( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfoKHX*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHX( ArrayProxy<const BindImageMemoryInfoKHX> bindInfos ) const
+  {
+    Result result = static_cast<Result>( vkBindImageMemory2KHX( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfoKHX*>( bindInfos.data() ) ) );
+    return createResultValue( result, "vk::Device::bindImageMemory2KHX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHX( DeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities ) const
+  {
+    return static_cast<Result>( vkGetDeviceGroupPresentCapabilitiesKHX( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHX*>( pDeviceGroupPresentCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentCapabilitiesKHX>::type Device::getGroupPresentCapabilitiesKHX() const
+  {
+    DeviceGroupPresentCapabilitiesKHX deviceGroupPresentCapabilities;
+    Result result = static_cast<Result>( vkGetDeviceGroupPresentCapabilitiesKHX( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHX*>( &deviceGroupPresentCapabilities ) ) );
+    return createResultValue( result, deviceGroupPresentCapabilities, "vk::Device::getGroupPresentCapabilitiesKHX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHX( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHX* pModes ) const
+  {
+    return static_cast<Result>( vkGetDeviceGroupSurfacePresentModesKHX( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHX*>( pModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentModeFlagsKHX>::type Device::getGroupSurfacePresentModesKHX( SurfaceKHR surface ) const
+  {
+    DeviceGroupPresentModeFlagsKHX modes;
+    Result result = static_cast<Result>( vkGetDeviceGroupSurfacePresentModesKHX( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHX*>( &modes ) ) );
+    return createResultValue( result, modes, "vk::Device::getGroupSurfacePresentModesKHX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHX( const AcquireNextImageInfoKHX* pAcquireInfo, uint32_t* pImageIndex ) const
+  {
+    return static_cast<Result>( vkAcquireNextImage2KHX( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHX*>( pAcquireInfo ), pImageIndex ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHX( const AcquireNextImageInfoKHX & acquireInfo ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( vkAcquireNextImage2KHX( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHX*>( &acquireInfo ), &imageIndex ) );
+    return createResultValue( result, imageIndex, "vk::Device::acquireNextImage2KHX", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
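+  // VK_KHR_descriptor_update_template: create, destroy and apply templated descriptor set updates.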
+  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate ) const
+  {
+    return static_cast<Result>( vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplateKHR*>( pDescriptorUpdateTemplate ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DescriptorUpdateTemplateKHR>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DescriptorUpdateTemplateKHR descriptorUpdateTemplate;
+    Result result = static_cast<Result>( vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplateKHR*>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, "vk::Device::createDescriptorUpdateTemplateKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueDescriptorUpdateTemplateKHR Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DescriptorUpdateTemplateKHRDeleter deleter( *this, allocator );
+    return UniqueDescriptorUpdateTemplateKHR( createDescriptorUpdateTemplateKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplateKHR descriptorUpdateTemplate, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplateKHR>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplateKHR descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplateKHR>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData ) const
+  {
+    vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplateKHR>( descriptorUpdateTemplate ), pData );
+  }
+
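+  // VK_EXT_hdr_metadata: set HDR metadata for one or more swapchains.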
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata ) const
+  {
+    vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR*>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT*>( pMetadata ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( swapchains.size() == metadata.size() );
+#else
+    if ( swapchains.size() != metadata.size() )
+    {
+      throw LogicError( "vk::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+    }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+    vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast<const VkSwapchainKHR*>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT*>( metadata.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain ) const
+  {
+    return static_cast<Result>( vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain ) const
+  {
+    Result result = static_cast<Result>( vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, "vk::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
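+  // VK_GOOGLE_display_timing: query the refresh cycle duration and past presentation timings of a swapchain.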
+  VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const
+  {
+    return static_cast<Result>( vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain ) const
+  {
+    RefreshCycleDurationGOOGLE displayTimingProperties;
+    Result result = static_cast<Result>( vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
+    return createResultValue( result, displayTimingProperties, "vk::Device::getRefreshCycleDurationGOOGLE" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings ) const
+  {
+    return static_cast<Result>( vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain ) const
+  {
+    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
+    uint32_t presentationTimingCount;
+    Result result = static_cast<Result>( vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+    if ( ( result == Result::eSuccess ) && presentationTimingCount )
+    {
+      presentationTimings.resize( presentationTimingCount );
+      result = static_cast<Result>( vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
+    }
+    return createResultValue( result, presentationTimings, "vk::Device::getPastPresentationTimingGOOGLE" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
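+  // VK_KHR_get_memory_requirements2: extensible (pNext-chainable) buffer and image memory requirement queries.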
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2KHR* pInfo, MemoryRequirements2KHR* pMemoryRequirements ) const
+  {
+    vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2KHR*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE MemoryRequirements2KHR Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2KHR & info ) const
+  {
+    MemoryRequirements2KHR memoryRequirements;
+    vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2KHR*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2KHR* pInfo, MemoryRequirements2KHR* pMemoryRequirements ) const
+  {
+    vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2KHR*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE MemoryRequirements2KHR Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2KHR & info ) const
+  {
+    MemoryRequirements2KHR memoryRequirements;
+    vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2KHR*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2KHR* pSparseMemoryRequirements ) const
+  {
+    vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2KHR*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2KHR*>( pSparseMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2KHR,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2KHR & info ) const
+  {
+    std::vector<SparseImageMemoryRequirements2KHR,Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2KHR*>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2KHR*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2KHR*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class DeviceDeleter;
+  using UniqueDevice = UniqueHandle<Device, DeviceDeleter>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
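+  // Handle wrapper around VkPhysicalDevice. The pointer-based overloads mirror the C API, while the
+  // enhanced-mode overloads return their outputs by value and report errors through createResultValue.
+  // A minimal usage sketch (assuming a previously enumerated vk::PhysicalDevice named 'physicalDevice'):
+  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
+  //   std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();
+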
+  class PhysicalDevice
+  {
+  public:
+    PhysicalDevice()
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    PhysicalDevice( std::nullptr_t )
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice )
+       : m_physicalDevice( physicalDevice )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice)
+    {
+      m_physicalDevice = physicalDevice;
+      return *this; 
+    }
+#endif
+
+    PhysicalDevice & operator=( std::nullptr_t )
+    {
+      m_physicalDevice = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PhysicalDevice const & rhs ) const
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=(PhysicalDevice const & rhs ) const
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<(PhysicalDevice const & rhs ) const
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+
+    void getProperties( PhysicalDeviceProperties* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PhysicalDeviceProperties getProperties() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<QueueFamilyProperties>> 
+    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PhysicalDeviceMemoryProperties getMemoryProperties() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getFeatures( PhysicalDeviceFeatures* pFeatures ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PhysicalDeviceFeatures getFeatures() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getFormatProperties( Format format, FormatProperties* pFormatProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    FormatProperties getFormatProperties( Format format ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueDevice createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<LayerProperties>> 
+    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<ExtensionProperties>> 
+    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageFormatProperties>> 
+    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayPropertiesKHR>> 
+    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>> 
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayKHR>> 
+    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>> 
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+    Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+    Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SurfaceFormatKHR>> 
+    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PresentModeKHR>> 
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+    Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getFeatures2KHR( PhysicalDeviceFeatures2KHR* pFeatures ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PhysicalDeviceFeatures2KHR getFeatures2KHR() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getProperties2KHR( PhysicalDeviceProperties2KHR* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PhysicalDeviceProperties2KHR getProperties2KHR() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getFormatProperties2KHR( Format format, FormatProperties2KHR* pFormatProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    FormatProperties2KHR getFormatProperties2KHR( Format format ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, ImageFormatProperties2KHR* pImageFormatProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<ImageFormatProperties2KHR>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR & imageFormatInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2KHR* pQueueFamilyProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<QueueFamilyProperties2KHR>> 
+    std::vector<QueueFamilyProperties2KHR,Allocator> getQueueFamilyProperties2KHR() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2KHR* pMemoryProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PhysicalDeviceMemoryProperties2KHR getMemoryProperties2KHR() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2KHR* pProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageFormatProperties2KHR>> 
+    std::vector<SparseImageFormatProperties2KHR,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR & formatInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, ExternalBufferPropertiesKHR* pExternalBufferProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ExternalBufferPropertiesKHR getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfoKHR & externalBufferInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, ExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ExternalSemaphorePropertiesKHR getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfoKHR & externalSemaphoreInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, ExternalFencePropertiesKHR* pExternalFenceProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ExternalFencePropertiesKHR getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfoKHR & externalFenceInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    Result releaseDisplayEXT( DisplayKHR display ) const;
+#else
+    ResultValueType<void>::type releaseDisplayEXT( DisplayKHR display ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<Display>::type acquireXlibDisplayEXT( DisplayKHR display ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getPresentRectanglesKHX( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<Rect2D>> 
+    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHX( SurfaceKHR surface ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SurfaceFormat2KHR>> 
+    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const
+    {
+      return m_physicalDevice;
+    }
+
+    explicit operator bool() const
+    {
+      return m_physicalDevice != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_physicalDevice == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPhysicalDevice m_physicalDevice;
+  };
+
+  static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class DeviceDeleter
+  {
+  public:
+    DeviceDeleter( Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_allocator( allocator )
+    {}
+
+    void operator()( Device device )
+    {
+      device.destroy( m_allocator );
+    }
+
+  private:
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties* pProperties ) const
+  {
+    vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties() const
+  {
+    PhysicalDeviceProperties properties;
+    vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties ) const
+  {
+    vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties() const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties ) const
+  {
+    vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const
+  {
+    PhysicalDeviceMemoryProperties memoryProperties;
+    vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures* pFeatures ) const
+  {
+    vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures() const
+  {
+    PhysicalDeviceFeatures features;
+    vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
+    return features;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( Format format, FormatProperties* pFormatProperties ) const
+  {
+    vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format ) const
+  {
+    FormatProperties formatProperties;
+    vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
+    return formatProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags ) const
+  {
+    ImageFormatProperties imageFormatProperties;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice ) const
+  {
+    return static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    Device device;
+    Result result = static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+    return createResultValue( result, device, "vk::PhysicalDevice::createDevice" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueDevice PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DeviceDeleter deleter( allocator );
+    return UniqueDevice( createDevice( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties ) const
+  {
+    return static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties() const
+  {
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties ) const
+  {
+    return static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const
+  {
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties ) const
+  {
+    vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling ) const
+  {
+    std::vector<SparseImageFormatProperties,Allocator> properties;
+    uint32_t propertyCount;
+    vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR() const
+  {
+    std::vector<DisplayPropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR() const
+  {
+    std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays ) const
+  {
+    return static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
+  {
+    std::vector<DisplayKHR,Allocator> displays;
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( displayCount <= displays.size() );
+    displays.resize( displayCount );
+    return createResultValue( result, displays, "vk::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties ) const
+  {
+    return static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display ) const
+  {
+    std::vector<DisplayModePropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( propertyCount <= properties.size() );
+    properties.resize( propertyCount );
+    return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode ) const
+  {
+    return static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DisplayModeKHR mode;
+    Result result = static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
+    return createResultValue( result, mode, "vk::PhysicalDevice::createDisplayModeKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities ) const
+  {
+    return static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex ) const
+  {
+    DisplayPlaneCapabilitiesKHR capabilities;
+    Result result = static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
+    return createResultValue( result, capabilities, "vk::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection ) const
+  {
+    return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection ) const
+  {
+    return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), pSupported ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface ) const
+  {
+    Bool32 supported;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), &supported ) );
+    return createResultValue( result, supported, "vk::PhysicalDevice::getSurfaceSupportKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface ) const
+  {
+    SurfaceCapabilitiesKHR surfaceCapabilities;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( surfaceFormatCount <= surfaceFormats.size() );
+    surfaceFormats.resize( surfaceFormatCount );
+    return createResultValue( result, surfaceFormats, "vk::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( presentModeCount <= presentModes.size() );
+    presentModes.resize( presentModeCount );
+    return createResultValue( result, presentModes, "vk::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
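+
+  // Swapchain-setup sketch built on the three surface queries above (again
+  // assuming the exception-enabled default; "gpu" and "surface" are placeholder
+  // handles obtained elsewhere):
+  //
+  //   vk::SurfaceCapabilitiesKHR        caps         = gpu.getSurfaceCapabilitiesKHR( surface );
+  //   std::vector<vk::SurfaceFormatKHR> formats      = gpu.getSurfaceFormatsKHR( surface );
+  //   std::vector<vk::PresentModeKHR>   presentModes = gpu.getSurfacePresentModesKHR( surface );
+  //   // caps.minImageCount and caps.currentExtent, plus a format and present
+  //   // mode picked from the two vectors, then feed into vk::SwapchainCreateInfoKHR.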
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display ) const
+  {
+    return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const
+  {
+    return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const
+  {
+    return vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const
+  {
+    return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const
+  {
+    return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const
+  {
+    return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const
+  {
+    return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
+  {
+    ExternalImageFormatPropertiesNV externalImageFormatProperties;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
+    return createResultValue( result, externalImageFormatProperties, "vk::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const
+  {
+    vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features ) const
+  {
+    DeviceGeneratedCommandsLimitsNVX limits;
+    vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
+    return limits;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2KHR* pFeatures ) const
+  {
+    vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2KHR*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PhysicalDeviceFeatures2KHR PhysicalDevice::getFeatures2KHR() const
+  {
+    PhysicalDeviceFeatures2KHR features;
+    vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2KHR*>( &features ) );
+    return features;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2KHR* pProperties ) const
+  {
+    vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PhysicalDeviceProperties2KHR PhysicalDevice::getProperties2KHR() const
+  {
+    PhysicalDeviceProperties2KHR properties;
+    vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( &properties ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2KHR* pFormatProperties ) const
+  {
+    vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2KHR*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE FormatProperties2KHR PhysicalDevice::getFormatProperties2KHR( Format format ) const
+  {
+    FormatProperties2KHR formatProperties;
+    vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2KHR*>( &formatProperties ) );
+    return formatProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, ImageFormatProperties2KHR* pImageFormatProperties ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2KHR*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties2KHR>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR & imageFormatInfo ) const
+  {
+    ImageFormatProperties2KHR imageFormatProperties;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2KHR*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2KHR* pQueueFamilyProperties ) const
+  {
+    vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2KHR*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2KHR,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR() const
+  {
+    std::vector<QueueFamilyProperties2KHR,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2KHR*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2KHR* pMemoryProperties ) const
+  {
+    vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2KHR*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2KHR PhysicalDevice::getMemoryProperties2KHR() const
+  {
+    PhysicalDeviceMemoryProperties2KHR memoryProperties;
+    vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2KHR*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2KHR* pProperties ) const
+  {
+    vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2KHR*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2KHR,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR & formatInfo ) const
+  {
+    std::vector<SparseImageFormatProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2KHR*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, ExternalBufferPropertiesKHR* pExternalBufferProperties ) const
+  {
+    vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfoKHR*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferPropertiesKHR*>( pExternalBufferProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ExternalBufferPropertiesKHR PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfoKHR & externalBufferInfo ) const
+  {
+    ExternalBufferPropertiesKHR externalBufferProperties;
+    vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfoKHR*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferPropertiesKHR*>( &externalBufferProperties ) );
+    return externalBufferProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, ExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties ) const
+  {
+    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfoKHR*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphorePropertiesKHR*>( pExternalSemaphoreProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ExternalSemaphorePropertiesKHR PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfoKHR & externalSemaphoreInfo ) const
+  {
+    ExternalSemaphorePropertiesKHR externalSemaphoreProperties;
+    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfoKHR*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphorePropertiesKHR*>( &externalSemaphoreProperties ) );
+    return externalSemaphoreProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, ExternalFencePropertiesKHR* pExternalFenceProperties ) const
+  {
+    vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfoKHR*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFencePropertiesKHR*>( pExternalFenceProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ExternalFencePropertiesKHR PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfoKHR & externalFenceInfo ) const
+  {
+    ExternalFencePropertiesKHR externalFenceProperties;
+    vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfoKHR*>( &externalFenceInfo ), reinterpret_cast<VkExternalFencePropertiesKHR*>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display ) const
+  {
+    return static_cast<Result>( vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display ) const
+  {
+    Result result = static_cast<Result>( vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, "vk::PhysicalDevice::releaseDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, DisplayKHR display ) const
+  {
+    return static_cast<Result>( vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( DisplayKHR display ) const
+  {
+    Display dpy;
+    Result result = static_cast<Result>( vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, dpy, "vk::PhysicalDevice::acquireXlibDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay ) const
+  {
+    return static_cast<Result>( vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const
+  {
+    DisplayKHR display;
+    Result result = static_cast<Result>( vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+    return createResultValue( result, display, "vk::PhysicalDevice::getRandROutputDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface ) const
+  {
+    SurfaceCapabilities2EXT surfaceCapabilities;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilities2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHX( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDevicePresentRectanglesKHX( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHX( SurfaceKHR surface ) const
+  {
+    std::vector<Rect2D,Allocator> rects;
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPhysicalDevicePresentRectanglesKHX( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( vkGetPhysicalDevicePresentRectanglesKHX( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( rectCount <= rects.size() );
+    rects.resize( rectCount );
+    return createResultValue( result, rects, "vk::PhysicalDevice::getPresentRectanglesKHX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+  {
+    SurfaceCapabilities2KHR surfaceCapabilities;
+    Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats ) const
+  {
+    return static_cast<Result>( vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( surfaceFormatCount <= surfaceFormats.size() );
+    surfaceFormats.resize( surfaceFormatCount );
+    return createResultValue( result, surfaceFormats, "vk::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  struct CmdProcessCommandsInfoNVX
+  {
+    CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
+      : sType( StructureType::eCmdProcessCommandsInfoNVX )
+      , pNext( nullptr )
+      , objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+      , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+      , maxSequencesCount( maxSequencesCount_ )
+      , targetCommandBuffer( targetCommandBuffer_ )
+      , sequencesCountBuffer( sequencesCountBuffer_ )
+      , sequencesCountOffset( sequencesCountOffset_ )
+      , sequencesIndexBuffer( sequencesIndexBuffer_ )
+      , sequencesIndexOffset( sequencesIndexOffset_ )
+    {
+    }
+
+    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) );
+    }
+
+    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) );
+      return *this;
+    }
+    CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
+    {
+      objectTable = objectTable_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
+    {
+      indirectCommandsTokenCount = indirectCommandsTokenCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
+    {
+      pIndirectCommandsTokens = pIndirectCommandsTokens_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
+    {
+      targetCommandBuffer = targetCommandBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
+    {
+      sequencesCountBuffer = sequencesCountBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
+    {
+      sequencesCountOffset = sequencesCountOffset_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
+    {
+      sequencesIndexBuffer = sequencesIndexBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
+    {
+      sequencesIndexOffset = sequencesIndexOffset_;
+      return *this;
+    }
+
+    operator const VkCmdProcessCommandsInfoNVX&() const
+    {
+      return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
+    }
+
+    bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
+          && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
+          && ( maxSequencesCount == rhs.maxSequencesCount )
+          && ( targetCommandBuffer == rhs.targetCommandBuffer )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+    }
+
+    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    ObjectTableNVX objectTable;
+    IndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t indirectCommandsTokenCount;
+    const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
+    uint32_t maxSequencesCount;
+    CommandBuffer targetCommandBuffer;
+    Buffer sequencesCountBuffer;
+    DeviceSize sequencesCountOffset;
+    Buffer sequencesIndexBuffer;
+    DeviceSize sequencesIndexOffset;
+  };
+  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
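+
+  // The generated info structs follow a fluent-setter pattern: each setXxx
+  // returns *this, so a struct can be filled in one chained expression.
+  // Sketch only; the handles and counts are placeholders:
+  //
+  //   vk::CmdProcessCommandsInfoNVX info = vk::CmdProcessCommandsInfoNVX()
+  //     .setObjectTable( objectTable )
+  //     .setIndirectCommandsLayout( layout )
+  //     .setIndirectCommandsTokenCount( tokenCount )
+  //     .setPIndirectCommandsTokens( tokens.data() )
+  //     .setMaxSequencesCount( maxSequences );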
+
+  struct PhysicalDeviceGroupPropertiesKHX
+  {
+    operator const VkPhysicalDeviceGroupPropertiesKHX&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGroupPropertiesKHX*>(this);
+    }
+
+    bool operator==( PhysicalDeviceGroupPropertiesKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE_KHX * sizeof( PhysicalDevice ) ) == 0 )
+          && ( subsetAllocation == rhs.subsetAllocation );
+    }
+
+    bool operator!=( PhysicalDeviceGroupPropertiesKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    void* pNext;
+    uint32_t physicalDeviceCount;
+    PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE_KHX];
+    Bool32 subsetAllocation;
+  };
+  static_assert( sizeof( PhysicalDeviceGroupPropertiesKHX ) == sizeof( VkPhysicalDeviceGroupPropertiesKHX ), "struct and wrapper have different size!" );
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class DebugReportCallbackEXTDeleter;
+  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, DebugReportCallbackEXTDeleter>;
+  class SurfaceKHRDeleter;
+  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, SurfaceKHRDeleter>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class Instance
+  {
+  public:
+    Instance()
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    Instance( std::nullptr_t )
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance )
+       : m_instance( instance )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Instance & operator=(VkInstance instance)
+    {
+      m_instance = instance;
+      return *this; 
+    }
+#endif
+
+    Instance & operator=( std::nullptr_t )
+    {
+      m_instance = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Instance const & rhs ) const
+    {
+      return m_instance == rhs.m_instance;
+    }
+
+    bool operator!=(Instance const & rhs ) const
+    {
+      return m_instance != rhs.m_instance;
+    }
+
+    bool operator<(Instance const & rhs ) const
+    {
+      return m_instance < rhs.m_instance;
+    }
+
+    void destroy( const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PhysicalDevice>> 
+    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PFN_vkVoidFunction getProcAddr( const char* pName ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    PFN_vkVoidFunction getProcAddr( const std::string & name ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+    Result createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+    void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+    Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+    Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueDebugReportCallbackEXT createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Result enumeratePhysicalDeviceGroupsKHX( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PhysicalDeviceGroupPropertiesKHX>> 
+    typename ResultValueType<std::vector<PhysicalDeviceGroupPropertiesKHX,Allocator>>::type enumeratePhysicalDeviceGroupsKHX() const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    Result createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    Result createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    ResultValueType<SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    UniqueSurfaceKHR createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const
+    {
+      return m_instance;
+    }
+
+    explicit operator bool() const
+    {
+      return m_instance != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const
+    {
+      return m_instance == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkInstance m_instance;
+  };
+
+  static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class DebugReportCallbackEXTDeleter
+  {
+  public:
+    DebugReportCallbackEXTDeleter( Instance instance = Instance(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_instance( instance )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( DebugReportCallbackEXT debugReportCallbackEXT )
+    {
+      m_instance.destroyDebugReportCallbackEXT( debugReportCallbackEXT, m_allocator );
+    }
+
+  private:
+    Instance m_instance;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+
+  class SurfaceKHRDeleter
+  {
+  public:
+    SurfaceKHRDeleter( Instance instance = Instance(), Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_instance( instance )
+      , m_allocator( allocator )
+    {}
+
+    void operator()( SurfaceKHR surfaceKHR )
+    {
+      m_instance.destroySurfaceKHR( surfaceKHR, m_allocator );
+    }
+
+  private:
+    Instance m_instance;
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
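+
+  // Unique-handle sketch: the *Unique creation functions pair the returned
+  // handle with the deleters defined above, so the surface is destroyed
+  // automatically when the UniqueSurfaceKHR goes out of scope. Shown for the
+  // Win32 path, i.e. assuming VK_USE_PLATFORM_WIN32_KHR; "instance" and
+  // "createInfo" are placeholders:
+  //
+  //   vk::UniqueSurfaceKHR surface =
+  //     instance.createWin32SurfaceKHRUnique( createInfo );
+  //   // SurfaceKHRDeleter invokes instance.destroySurfaceKHR( ... ) on scope exit.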
+
+  VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices ) const
+  {
+    return static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices() const
+  {
+    std::vector<PhysicalDevice,Allocator> physicalDevices;
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( physicalDeviceCount <= physicalDevices.size() );
+    physicalDevices.resize( physicalDeviceCount );
+    return createResultValue( result, physicalDevices, "vk::Instance::enumeratePhysicalDevices" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
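+
+  // Usage sketch for the enumeration loop above (exception-enabled default, so
+  // failures surface as exceptions from createResultValue; "instance" is a
+  // placeholder handle):
+  //
+  //   std::vector<vk::PhysicalDevice> devices = instance.enumeratePhysicalDevices();
+  //   for ( vk::PhysicalDevice const& dev : devices )
+  //     /* inspect dev, e.g. via the getProperties2KHR / getFeatures2KHR
+  //        wrappers above when the corresponding extension is enabled */;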
+
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName ) const
+  {
+    return vkGetInstanceProcAddr( m_instance, pName );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const
+  {
+    return vkGetInstanceProcAddr( m_instance, name.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
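+  // Each platform-specific surface factory below follows the same three-tier pattern used
+  // throughout the header: a C-style overload taking raw pointers, an enhanced-mode overload
+  // returning ResultValueType<SurfaceKHR>::type, and a *Unique overload that hands the created
+  // surface to a SurfaceKHRDeleter.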
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createAndroidSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createAndroidSurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createDisplayPlaneSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createDisplayPlaneSurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  VULKAN_HPP_INLINE Result Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createMirSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createMirSurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createViSurfaceNN" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createViSurfaceNN( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createWaylandSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createWaylandSurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createWin32SurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createWin32SurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createXlibSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createXlibSurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createXcbSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createXcbSurfaceKHR( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+  VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback ) const
+  {
+    return static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+    return createResultValue( result, callback, "vk::Instance::createDebugReportCallbackEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueDebugReportCallbackEXT Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    DebugReportCallbackEXTDeleter deleter( *this, allocator );
+    return UniqueDebugReportCallbackEXT( createDebugReportCallbackEXT( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator ) const
+  {
+    vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator ) const
+  {
+    vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const
+  {
+    vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const
+  {
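+    // Note: the size-equality check below appears to be an artifact of the code generator
+    // treating the two string parameters as a paired array; per VK_EXT_debug_report the layer
+    // prefix and the message are independent null-terminated strings.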
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    assert( layerPrefix.size() == message.size() );
+#else
+    if ( layerPrefix.size() != message.size() )
+    {
+      throw LogicError( "vk::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
+    }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+    vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHX( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties ) const
+  {
+    return static_cast<Result>( vkEnumeratePhysicalDeviceGroupsKHX( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupPropertiesKHX*>( pPhysicalDeviceGroupProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator> 
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupPropertiesKHX,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHX() const
+  {
+    std::vector<PhysicalDeviceGroupPropertiesKHX,Allocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( vkEnumeratePhysicalDeviceGroupsKHX( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( vkEnumeratePhysicalDeviceGroupsKHX( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupPropertiesKHX*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    assert( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    return createResultValue( result, physicalDeviceGroupProperties, "vk::Instance::enumeratePhysicalDeviceGroupsKHX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createIOSSurfaceMVK" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createIOSSurfaceMVK( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
+  {
+    return static_cast<Result>( vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHR surface;
+    Result result = static_cast<Result>( vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, "vk::Instance::createMacOSSurfaceMVK" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator ) const
+  {
+    SurfaceKHRDeleter deleter( *this, allocator );
+    return UniqueSurfaceKHR( createMacOSSurfaceMVK( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
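+  // Struct wrappers such as DeviceGroupDeviceCreateInfoKHX fix sType in the constructor, offer
+  // chainable set*() members, convert to the underlying Vk* struct via reinterpret_cast, and
+  // are layout-checked against the C struct by a static_assert.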
+  struct DeviceGroupDeviceCreateInfoKHX
+  {
+    DeviceGroupDeviceCreateInfoKHX( uint32_t physicalDeviceCount_ = 0, const PhysicalDevice* pPhysicalDevices_ = nullptr )
+      : sType( StructureType::eDeviceGroupDeviceCreateInfoKHX )
+      , pNext( nullptr )
+      , physicalDeviceCount( physicalDeviceCount_ )
+      , pPhysicalDevices( pPhysicalDevices_ )
+    {
+    }
+
+    DeviceGroupDeviceCreateInfoKHX( VkDeviceGroupDeviceCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfoKHX ) );
+    }
+
+    DeviceGroupDeviceCreateInfoKHX& operator=( VkDeviceGroupDeviceCreateInfoKHX const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfoKHX ) );
+      return *this;
+    }
+    DeviceGroupDeviceCreateInfoKHX& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfoKHX& setPhysicalDeviceCount( uint32_t physicalDeviceCount_ )
+    {
+      physicalDeviceCount = physicalDeviceCount_;
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfoKHX& setPPhysicalDevices( const PhysicalDevice* pPhysicalDevices_ )
+    {
+      pPhysicalDevices = pPhysicalDevices_;
+      return *this;
+    }
+
+    operator const VkDeviceGroupDeviceCreateInfoKHX&() const
+    {
+      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfoKHX*>(this);
+    }
+
+    bool operator==( DeviceGroupDeviceCreateInfoKHX const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( pPhysicalDevices == rhs.pPhysicalDevices );
+    }
+
+    bool operator!=( DeviceGroupDeviceCreateInfoKHX const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType;
+
+  public:
+    const void* pNext;
+    uint32_t physicalDeviceCount;
+    const PhysicalDevice* pPhysicalDevices;
+  };
+  static_assert( sizeof( DeviceGroupDeviceCreateInfoKHX ) == sizeof( VkDeviceGroupDeviceCreateInfoKHX ), "struct and wrapper have different size!" );
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class InstanceDeleter;
+  using UniqueInstance = UniqueHandle<Instance, InstanceDeleter>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr );
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  UniqueInstance createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr );
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class InstanceDeleter
+  {
+  public:
+    InstanceDeleter( Optional<const AllocationCallbacks> allocator = nullptr )
+      : m_allocator( allocator )
+    {}
+
+    void operator()( Instance instance )
+    {
+      instance.destroy( m_allocator );
+    }
+
+  private:
+    Optional<const AllocationCallbacks> m_allocator;
+  };
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
+  {
+    return static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator )
+  {
+    Instance instance;
+    Result result = static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+    return createResultValue( result, instance, "vk::createInstance" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  VULKAN_HPP_INLINE UniqueInstance createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator )
+  {
+    InstanceDeleter deleter( allocator );
+    return UniqueInstance( createInstance( createInfo, allocator ), deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
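+  // Illustrative usage only (a sketch, not part of the generated header): with enhanced mode
+  // and smart handles enabled, instance creation could look like this:
+  //
+  //   vk::ApplicationInfo appInfo( "demo", 1, "engine", 1, VK_API_VERSION_1_0 );
+  //   vk::InstanceCreateInfo createInfo( vk::InstanceCreateFlags(), &appInfo );
+  //   vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );
+  //   // The stored InstanceDeleter destroys the VkInstance when 'instance' leaves scope.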
+
+
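+  // to_string overloads for flag types that define no bits return the placeholder "(void)" for
+  // the FlagBits type and "{}" for the corresponding Flags type.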
+  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlagsKHR)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+  VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_MIR_KHR*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagBitsMVK)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagsMVK)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagBitsMVK)
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagsMVK)
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBitsKHR)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagsKHR)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagBitsNV)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagsNV)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagBitsEXT)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagsEXT)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagBitsNV)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagsNV)
+  {
+    return "{}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagBitsNV)
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagsNV)
+  {
+    return "{}";
+  }
+
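+  // Enum to_string overloads return the enumerant name without the leading 'e'; values that do
+  // not match a known enumerant map to "invalid".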
+  VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
+  {
+    switch (value)
+    {
+    case ImageLayout::eUndefined: return "Undefined";
+    case ImageLayout::eGeneral: return "General";
+    case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
+    case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
+    case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
+    case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
+    case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
+    case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
+    case ImageLayout::ePreinitialized: return "Preinitialized";
+    case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
+    case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
+  {
+    switch (value)
+    {
+    case AttachmentLoadOp::eLoad: return "Load";
+    case AttachmentLoadOp::eClear: return "Clear";
+    case AttachmentLoadOp::eDontCare: return "DontCare";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
+  {
+    switch (value)
+    {
+    case AttachmentStoreOp::eStore: return "Store";
+    case AttachmentStoreOp::eDontCare: return "DontCare";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageType value)
+  {
+    switch (value)
+    {
+    case ImageType::e1D: return "1D";
+    case ImageType::e2D: return "2D";
+    case ImageType::e3D: return "3D";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
+  {
+    switch (value)
+    {
+    case ImageTiling::eOptimal: return "Optimal";
+    case ImageTiling::eLinear: return "Linear";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
+  {
+    switch (value)
+    {
+    case ImageViewType::e1D: return "1D";
+    case ImageViewType::e2D: return "2D";
+    case ImageViewType::e3D: return "3D";
+    case ImageViewType::eCube: return "Cube";
+    case ImageViewType::e1DArray: return "1DArray";
+    case ImageViewType::e2DArray: return "2DArray";
+    case ImageViewType::eCubeArray: return "CubeArray";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
+  {
+    switch (value)
+    {
+    case CommandBufferLevel::ePrimary: return "Primary";
+    case CommandBufferLevel::eSecondary: return "Secondary";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
+  {
+    switch (value)
+    {
+    case ComponentSwizzle::eIdentity: return "Identity";
+    case ComponentSwizzle::eZero: return "Zero";
+    case ComponentSwizzle::eOne: return "One";
+    case ComponentSwizzle::eR: return "R";
+    case ComponentSwizzle::eG: return "G";
+    case ComponentSwizzle::eB: return "B";
+    case ComponentSwizzle::eA: return "A";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
+  {
+    switch (value)
+    {
+    case DescriptorType::eSampler: return "Sampler";
+    case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
+    case DescriptorType::eSampledImage: return "SampledImage";
+    case DescriptorType::eStorageImage: return "StorageImage";
+    case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
+    case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
+    case DescriptorType::eUniformBuffer: return "UniformBuffer";
+    case DescriptorType::eStorageBuffer: return "StorageBuffer";
+    case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
+    case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
+    case DescriptorType::eInputAttachment: return "InputAttachment";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryType value)
+  {
+    switch (value)
+    {
+    case QueryType::eOcclusion: return "Occlusion";
+    case QueryType::ePipelineStatistics: return "PipelineStatistics";
+    case QueryType::eTimestamp: return "Timestamp";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BorderColor value)
+  {
+    switch (value)
+    {
+    case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
+    case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
+    case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
+    case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
+    case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
+    case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
+  {
+    switch (value)
+    {
+    case PipelineBindPoint::eGraphics: return "Graphics";
+    case PipelineBindPoint::eCompute: return "Compute";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
+  {
+    switch (value)
+    {
+    case PipelineCacheHeaderVersion::eOne: return "One";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
+  {
+    switch (value)
+    {
+    case PrimitiveTopology::ePointList: return "PointList";
+    case PrimitiveTopology::eLineList: return "LineList";
+    case PrimitiveTopology::eLineStrip: return "LineStrip";
+    case PrimitiveTopology::eTriangleList: return "TriangleList";
+    case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
+    case PrimitiveTopology::eTriangleFan: return "TriangleFan";
+    case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
+    case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
+    case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
+    case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
+    case PrimitiveTopology::ePatchList: return "PatchList";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SharingMode value)
+  {
+    switch (value)
+    {
+    case SharingMode::eExclusive: return "Exclusive";
+    case SharingMode::eConcurrent: return "Concurrent";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(IndexType value)
+  {
+    switch (value)
+    {
+    case IndexType::eUint16: return "Uint16";
+    case IndexType::eUint32: return "Uint32";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(Filter value)
+  {
+    switch (value)
+    {
+    case Filter::eNearest: return "Nearest";
+    case Filter::eLinear: return "Linear";
+    case Filter::eCubicIMG: return "CubicIMG";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
+  {
+    switch (value)
+    {
+    case SamplerMipmapMode::eNearest: return "Nearest";
+    case SamplerMipmapMode::eLinear: return "Linear";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
+  {
+    switch (value)
+    {
+    case SamplerAddressMode::eRepeat: return "Repeat";
+    case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
+    case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
+    case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
+    case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CompareOp value)
+  {
+    switch (value)
+    {
+    case CompareOp::eNever: return "Never";
+    case CompareOp::eLess: return "Less";
+    case CompareOp::eEqual: return "Equal";
+    case CompareOp::eLessOrEqual: return "LessOrEqual";
+    case CompareOp::eGreater: return "Greater";
+    case CompareOp::eNotEqual: return "NotEqual";
+    case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
+    case CompareOp::eAlways: return "Always";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
+  {
+    switch (value)
+    {
+    case PolygonMode::eFill: return "Fill";
+    case PolygonMode::eLine: return "Line";
+    case PolygonMode::ePoint: return "Point";
+    case PolygonMode::eFillRectangleNV: return "FillRectangleNV";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
+  {
+    switch (value)
+    {
+    case CullModeFlagBits::eNone: return "None";
+    case CullModeFlagBits::eFront: return "Front";
+    case CullModeFlagBits::eBack: return "Back";
+    case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
+    default: return "invalid";
+    }
+  }
+
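+  // Flag-set to_string overloads test each bit and join the names of the set bits with " | "
+  // inside braces; an empty mask returns "{}" via the early return. Note that the eNone test
+  // below can never be taken, since VK_CULL_MODE_NONE is zero and a zero mask returns early.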
+  VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & CullModeFlagBits::eNone) result += "None | ";
+    if (value & CullModeFlagBits::eFront) result += "Front | ";
+    if (value & CullModeFlagBits::eBack) result += "Back | ";
+    if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FrontFace value)
+  {
+    switch (value)
+    {
+    case FrontFace::eCounterClockwise: return "CounterClockwise";
+    case FrontFace::eClockwise: return "Clockwise";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
+  {
+    switch (value)
+    {
+    case BlendFactor::eZero: return "Zero";
+    case BlendFactor::eOne: return "One";
+    case BlendFactor::eSrcColor: return "SrcColor";
+    case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
+    case BlendFactor::eDstColor: return "DstColor";
+    case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
+    case BlendFactor::eSrcAlpha: return "SrcAlpha";
+    case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
+    case BlendFactor::eDstAlpha: return "DstAlpha";
+    case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
+    case BlendFactor::eConstantColor: return "ConstantColor";
+    case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
+    case BlendFactor::eConstantAlpha: return "ConstantAlpha";
+    case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
+    case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
+    case BlendFactor::eSrc1Color: return "Src1Color";
+    case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
+    case BlendFactor::eSrc1Alpha: return "Src1Alpha";
+    case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BlendOp value)
+  {
+    switch (value)
+    {
+    case BlendOp::eAdd: return "Add";
+    case BlendOp::eSubtract: return "Subtract";
+    case BlendOp::eReverseSubtract: return "ReverseSubtract";
+    case BlendOp::eMin: return "Min";
+    case BlendOp::eMax: return "Max";
+    case BlendOp::eZeroEXT: return "ZeroEXT";
+    case BlendOp::eSrcEXT: return "SrcEXT";
+    case BlendOp::eDstEXT: return "DstEXT";
+    case BlendOp::eSrcOverEXT: return "SrcOverEXT";
+    case BlendOp::eDstOverEXT: return "DstOverEXT";
+    case BlendOp::eSrcInEXT: return "SrcInEXT";
+    case BlendOp::eDstInEXT: return "DstInEXT";
+    case BlendOp::eSrcOutEXT: return "SrcOutEXT";
+    case BlendOp::eDstOutEXT: return "DstOutEXT";
+    case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
+    case BlendOp::eDstAtopEXT: return "DstAtopEXT";
+    case BlendOp::eXorEXT: return "XorEXT";
+    case BlendOp::eMultiplyEXT: return "MultiplyEXT";
+    case BlendOp::eScreenEXT: return "ScreenEXT";
+    case BlendOp::eOverlayEXT: return "OverlayEXT";
+    case BlendOp::eDarkenEXT: return "DarkenEXT";
+    case BlendOp::eLightenEXT: return "LightenEXT";
+    case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
+    case BlendOp::eColorburnEXT: return "ColorburnEXT";
+    case BlendOp::eHardlightEXT: return "HardlightEXT";
+    case BlendOp::eSoftlightEXT: return "SoftlightEXT";
+    case BlendOp::eDifferenceEXT: return "DifferenceEXT";
+    case BlendOp::eExclusionEXT: return "ExclusionEXT";
+    case BlendOp::eInvertEXT: return "InvertEXT";
+    case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
+    case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
+    case BlendOp::eLinearburnEXT: return "LinearburnEXT";
+    case BlendOp::eVividlightEXT: return "VividlightEXT";
+    case BlendOp::eLinearlightEXT: return "LinearlightEXT";
+    case BlendOp::ePinlightEXT: return "PinlightEXT";
+    case BlendOp::eHardmixEXT: return "HardmixEXT";
+    case BlendOp::eHslHueEXT: return "HslHueEXT";
+    case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
+    case BlendOp::eHslColorEXT: return "HslColorEXT";
+    case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
+    case BlendOp::ePlusEXT: return "PlusEXT";
+    case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
+    case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
+    case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
+    case BlendOp::eMinusEXT: return "MinusEXT";
+    case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
+    case BlendOp::eContrastEXT: return "ContrastEXT";
+    case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
+    case BlendOp::eRedEXT: return "RedEXT";
+    case BlendOp::eGreenEXT: return "GreenEXT";
+    case BlendOp::eBlueEXT: return "BlueEXT";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(StencilOp value)
+  {
+    switch (value)
+    {
+    case StencilOp::eKeep: return "Keep";
+    case StencilOp::eZero: return "Zero";
+    case StencilOp::eReplace: return "Replace";
+    case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
+    case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
+    case StencilOp::eInvert: return "Invert";
+    case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
+    case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(LogicOp value)
+  {
+    switch (value)
+    {
+    case LogicOp::eClear: return "Clear";
+    case LogicOp::eAnd: return "And";
+    case LogicOp::eAndReverse: return "AndReverse";
+    case LogicOp::eCopy: return "Copy";
+    case LogicOp::eAndInverted: return "AndInverted";
+    case LogicOp::eNoOp: return "NoOp";
+    case LogicOp::eXor: return "Xor";
+    case LogicOp::eOr: return "Or";
+    case LogicOp::eNor: return "Nor";
+    case LogicOp::eEquivalent: return "Equivalent";
+    case LogicOp::eInvert: return "Invert";
+    case LogicOp::eOrReverse: return "OrReverse";
+    case LogicOp::eCopyInverted: return "CopyInverted";
+    case LogicOp::eOrInverted: return "OrInverted";
+    case LogicOp::eNand: return "Nand";
+    case LogicOp::eSet: return "Set";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
+  {
+    switch (value)
+    {
+    case InternalAllocationType::eExecutable: return "Executable";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
+  {
+    switch (value)
+    {
+    case SystemAllocationScope::eCommand: return "Command";
+    case SystemAllocationScope::eObject: return "Object";
+    case SystemAllocationScope::eCache: return "Cache";
+    case SystemAllocationScope::eDevice: return "Device";
+    case SystemAllocationScope::eInstance: return "Instance";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
+  {
+    switch (value)
+    {
+    case PhysicalDeviceType::eOther: return "Other";
+    case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
+    case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
+    case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
+    case PhysicalDeviceType::eCpu: return "Cpu";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
+  {
+    switch (value)
+    {
+    case VertexInputRate::eVertex: return "Vertex";
+    case VertexInputRate::eInstance: return "Instance";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(Format value)
+  {
+    switch (value)
+    {
+    case Format::eUndefined: return "Undefined";
+    case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
+    case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
+    case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
+    case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
+    case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
+    case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
+    case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
+    case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
+    case Format::eR8Unorm: return "R8Unorm";
+    case Format::eR8Snorm: return "R8Snorm";
+    case Format::eR8Uscaled: return "R8Uscaled";
+    case Format::eR8Sscaled: return "R8Sscaled";
+    case Format::eR8Uint: return "R8Uint";
+    case Format::eR8Sint: return "R8Sint";
+    case Format::eR8Srgb: return "R8Srgb";
+    case Format::eR8G8Unorm: return "R8G8Unorm";
+    case Format::eR8G8Snorm: return "R8G8Snorm";
+    case Format::eR8G8Uscaled: return "R8G8Uscaled";
+    case Format::eR8G8Sscaled: return "R8G8Sscaled";
+    case Format::eR8G8Uint: return "R8G8Uint";
+    case Format::eR8G8Sint: return "R8G8Sint";
+    case Format::eR8G8Srgb: return "R8G8Srgb";
+    case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
+    case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
+    case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
+    case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
+    case Format::eR8G8B8Uint: return "R8G8B8Uint";
+    case Format::eR8G8B8Sint: return "R8G8B8Sint";
+    case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
+    case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
+    case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
+    case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
+    case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
+    case Format::eB8G8R8Uint: return "B8G8R8Uint";
+    case Format::eB8G8R8Sint: return "B8G8R8Sint";
+    case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
+    case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
+    case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
+    case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
+    case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
+    case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
+    case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
+    case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
+    case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
+    case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
+    case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
+    case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
+    case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
+    case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
+    case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
+    case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
+    case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
+    case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
+    case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
+    case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
+    case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
+    case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
+    case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
+    case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
+    case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
+    case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
+    case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
+    case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
+    case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
+    case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
+    case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
+    case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
+    case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
+    case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
+    case Format::eR16Unorm: return "R16Unorm";
+    case Format::eR16Snorm: return "R16Snorm";
+    case Format::eR16Uscaled: return "R16Uscaled";
+    case Format::eR16Sscaled: return "R16Sscaled";
+    case Format::eR16Uint: return "R16Uint";
+    case Format::eR16Sint: return "R16Sint";
+    case Format::eR16Sfloat: return "R16Sfloat";
+    case Format::eR16G16Unorm: return "R16G16Unorm";
+    case Format::eR16G16Snorm: return "R16G16Snorm";
+    case Format::eR16G16Uscaled: return "R16G16Uscaled";
+    case Format::eR16G16Sscaled: return "R16G16Sscaled";
+    case Format::eR16G16Uint: return "R16G16Uint";
+    case Format::eR16G16Sint: return "R16G16Sint";
+    case Format::eR16G16Sfloat: return "R16G16Sfloat";
+    case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
+    case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
+    case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
+    case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
+    case Format::eR16G16B16Uint: return "R16G16B16Uint";
+    case Format::eR16G16B16Sint: return "R16G16B16Sint";
+    case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
+    case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
+    case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
+    case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
+    case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
+    case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
+    case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
+    case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
+    case Format::eR32Uint: return "R32Uint";
+    case Format::eR32Sint: return "R32Sint";
+    case Format::eR32Sfloat: return "R32Sfloat";
+    case Format::eR32G32Uint: return "R32G32Uint";
+    case Format::eR32G32Sint: return "R32G32Sint";
+    case Format::eR32G32Sfloat: return "R32G32Sfloat";
+    case Format::eR32G32B32Uint: return "R32G32B32Uint";
+    case Format::eR32G32B32Sint: return "R32G32B32Sint";
+    case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
+    case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
+    case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
+    case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
+    case Format::eR64Uint: return "R64Uint";
+    case Format::eR64Sint: return "R64Sint";
+    case Format::eR64Sfloat: return "R64Sfloat";
+    case Format::eR64G64Uint: return "R64G64Uint";
+    case Format::eR64G64Sint: return "R64G64Sint";
+    case Format::eR64G64Sfloat: return "R64G64Sfloat";
+    case Format::eR64G64B64Uint: return "R64G64B64Uint";
+    case Format::eR64G64B64Sint: return "R64G64B64Sint";
+    case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
+    case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
+    case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
+    case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
+    case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
+    case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
+    case Format::eD16Unorm: return "D16Unorm";
+    case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
+    case Format::eD32Sfloat: return "D32Sfloat";
+    case Format::eS8Uint: return "S8Uint";
+    case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
+    case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
+    case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
+    case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
+    case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
+    case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
+    case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
+    case Format::eBc2UnormBlock: return "Bc2UnormBlock";
+    case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
+    case Format::eBc3UnormBlock: return "Bc3UnormBlock";
+    case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
+    case Format::eBc4UnormBlock: return "Bc4UnormBlock";
+    case Format::eBc4SnormBlock: return "Bc4SnormBlock";
+    case Format::eBc5UnormBlock: return "Bc5UnormBlock";
+    case Format::eBc5SnormBlock: return "Bc5SnormBlock";
+    case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
+    case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
+    case Format::eBc7UnormBlock: return "Bc7UnormBlock";
+    case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
+    case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
+    case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
+    case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
+    case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
+    case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
+    case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
+    case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
+    case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
+    case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
+    case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
+    case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
+    case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
+    case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
+    case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
+    case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
+    case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
+    case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
+    case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
+    case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
+    case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
+    case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
+    case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
+    case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
+    case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
+    case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
+    case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
+    case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
+    case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
+    case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
+    case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
+    case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
+    case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
+    case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
+    case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
+    case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
+    case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
+    case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
+    case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
+    case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
+    case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
+    case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
+    case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
+    case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
+    case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
+    case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
+    case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(StructureType value)
+  {
+    switch (value)
+    {
+    case StructureType::eApplicationInfo: return "ApplicationInfo";
+    case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
+    case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
+    case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
+    case StructureType::eSubmitInfo: return "SubmitInfo";
+    case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
+    case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
+    case StructureType::eBindSparseInfo: return "BindSparseInfo";
+    case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
+    case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
+    case StructureType::eEventCreateInfo: return "EventCreateInfo";
+    case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
+    case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
+    case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
+    case StructureType::eImageCreateInfo: return "ImageCreateInfo";
+    case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
+    case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
+    case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
+    case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
+    case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
+    case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
+    case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
+    case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
+    case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
+    case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
+    case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
+    case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
+    case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
+    case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
+    case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
+    case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
+    case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
+    case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
+    case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
+    case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
+    case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
+    case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
+    case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
+    case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
+    case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
+    case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
+    case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
+    case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
+    case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
+    case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
+    case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
+    case StructureType::eMemoryBarrier: return "MemoryBarrier";
+    case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
+    case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
+    case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
+    case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
+    case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
+    case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
+    case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
+    case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
+    case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
+    case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
+    case StructureType::eMirSurfaceCreateInfoKHR: return "MirSurfaceCreateInfoKHR";
+    case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
+    case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
+    case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
+    case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
+    case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
+    case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
+    case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
+    case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
+    case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
+    case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
+    case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD";
+    case StructureType::eRenderPassMultiviewCreateInfoKHX: return "RenderPassMultiviewCreateInfoKHX";
+    case StructureType::ePhysicalDeviceMultiviewFeaturesKHX: return "PhysicalDeviceMultiviewFeaturesKHX";
+    case StructureType::ePhysicalDeviceMultiviewPropertiesKHX: return "PhysicalDeviceMultiviewPropertiesKHX";
+    case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
+    case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
+    case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
+    case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
+    case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
+    case StructureType::ePhysicalDeviceFeatures2KHR: return "PhysicalDeviceFeatures2KHR";
+    case StructureType::ePhysicalDeviceProperties2KHR: return "PhysicalDeviceProperties2KHR";
+    case StructureType::eFormatProperties2KHR: return "FormatProperties2KHR";
+    case StructureType::eImageFormatProperties2KHR: return "ImageFormatProperties2KHR";
+    case StructureType::ePhysicalDeviceImageFormatInfo2KHR: return "PhysicalDeviceImageFormatInfo2KHR";
+    case StructureType::eQueueFamilyProperties2KHR: return "QueueFamilyProperties2KHR";
+    case StructureType::ePhysicalDeviceMemoryProperties2KHR: return "PhysicalDeviceMemoryProperties2KHR";
+    case StructureType::eSparseImageFormatProperties2KHR: return "SparseImageFormatProperties2KHR";
+    case StructureType::ePhysicalDeviceSparseImageFormatInfo2KHR: return "PhysicalDeviceSparseImageFormatInfo2KHR";
+    case StructureType::eMemoryAllocateFlagsInfoKHX: return "MemoryAllocateFlagsInfoKHX";
+    case StructureType::eBindBufferMemoryInfoKHX: return "BindBufferMemoryInfoKHX";
+    case StructureType::eBindImageMemoryInfoKHX: return "BindImageMemoryInfoKHX";
+    case StructureType::eDeviceGroupRenderPassBeginInfoKHX: return "DeviceGroupRenderPassBeginInfoKHX";
+    case StructureType::eDeviceGroupCommandBufferBeginInfoKHX: return "DeviceGroupCommandBufferBeginInfoKHX";
+    case StructureType::eDeviceGroupSubmitInfoKHX: return "DeviceGroupSubmitInfoKHX";
+    case StructureType::eDeviceGroupBindSparseInfoKHX: return "DeviceGroupBindSparseInfoKHX";
+    case StructureType::eDeviceGroupPresentCapabilitiesKHX: return "DeviceGroupPresentCapabilitiesKHX";
+    case StructureType::eImageSwapchainCreateInfoKHX: return "ImageSwapchainCreateInfoKHX";
+    case StructureType::eBindImageMemorySwapchainInfoKHX: return "BindImageMemorySwapchainInfoKHX";
+    case StructureType::eAcquireNextImageInfoKHX: return "AcquireNextImageInfoKHX";
+    case StructureType::eDeviceGroupPresentInfoKHX: return "DeviceGroupPresentInfoKHX";
+    case StructureType::eDeviceGroupSwapchainCreateInfoKHX: return "DeviceGroupSwapchainCreateInfoKHX";
+    case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
+    case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
+    case StructureType::ePhysicalDeviceGroupPropertiesKHX: return "PhysicalDeviceGroupPropertiesKHX";
+    case StructureType::eDeviceGroupDeviceCreateInfoKHX: return "DeviceGroupDeviceCreateInfoKHX";
+    case StructureType::ePhysicalDeviceExternalImageFormatInfoKHR: return "PhysicalDeviceExternalImageFormatInfoKHR";
+    case StructureType::eExternalImageFormatPropertiesKHR: return "ExternalImageFormatPropertiesKHR";
+    case StructureType::ePhysicalDeviceExternalBufferInfoKHR: return "PhysicalDeviceExternalBufferInfoKHR";
+    case StructureType::eExternalBufferPropertiesKHR: return "ExternalBufferPropertiesKHR";
+    case StructureType::ePhysicalDeviceIdPropertiesKHR: return "PhysicalDeviceIdPropertiesKHR";
+    case StructureType::eExternalMemoryBufferCreateInfoKHR: return "ExternalMemoryBufferCreateInfoKHR";
+    case StructureType::eExternalMemoryImageCreateInfoKHR: return "ExternalMemoryImageCreateInfoKHR";
+    case StructureType::eExportMemoryAllocateInfoKHR: return "ExportMemoryAllocateInfoKHR";
+    case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
+    case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
+    case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
+    case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
+    case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
+    case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
+    case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
+    case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
+    case StructureType::ePhysicalDeviceExternalSemaphoreInfoKHR: return "PhysicalDeviceExternalSemaphoreInfoKHR";
+    case StructureType::eExternalSemaphorePropertiesKHR: return "ExternalSemaphorePropertiesKHR";
+    case StructureType::eExportSemaphoreCreateInfoKHR: return "ExportSemaphoreCreateInfoKHR";
+    case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
+    case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
+    case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
+    case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
+    case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
+    case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
+    case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR";
+    case StructureType::ePhysicalDevice16BitStorageFeaturesKHR: return "PhysicalDevice16BitStorageFeaturesKHR";
+    case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
+    case StructureType::eDescriptorUpdateTemplateCreateInfoKHR: return "DescriptorUpdateTemplateCreateInfoKHR";
+    case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
+    case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
+    case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
+    case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
+    case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
+    case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
+    case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV";
+    case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
+    case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
+    case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
+    case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
+    case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
+    case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
+    case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+    case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
+    case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT";
+    case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT";
+    case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT";
+    case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR";
+    case StructureType::ePhysicalDeviceExternalFenceInfoKHR: return "PhysicalDeviceExternalFenceInfoKHR";
+    case StructureType::eExternalFencePropertiesKHR: return "ExternalFencePropertiesKHR";
+    case StructureType::eExportFenceCreateInfoKHR: return "ExportFenceCreateInfoKHR";
+    case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR";
+    case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR";
+    case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR";
+    case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR";
+    case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR";
+    case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR";
+    case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR";
+    case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR";
+    case StructureType::ePhysicalDeviceVariablePointerFeaturesKHR: return "PhysicalDeviceVariablePointerFeaturesKHR";
+    case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK";
+    case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK";
+    case StructureType::eMemoryDedicatedRequirementsKHR: return "MemoryDedicatedRequirementsKHR";
+    case StructureType::eMemoryDedicatedAllocateInfoKHR: return "MemoryDedicatedAllocateInfoKHR";
+    case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT: return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT";
+    case StructureType::eSamplerReductionModeCreateInfoEXT: return "SamplerReductionModeCreateInfoEXT";
+    case StructureType::eBufferMemoryRequirementsInfo2KHR: return "BufferMemoryRequirementsInfo2KHR";
+    case StructureType::eImageMemoryRequirementsInfo2KHR: return "ImageMemoryRequirementsInfo2KHR";
+    case StructureType::eImageSparseMemoryRequirementsInfo2KHR: return "ImageSparseMemoryRequirementsInfo2KHR";
+    case StructureType::eMemoryRequirements2KHR: return "MemoryRequirements2KHR";
+    case StructureType::eSparseImageMemoryRequirements2KHR: return "SparseImageMemoryRequirements2KHR";
+    case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+    case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+    case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+    case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
+    case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
+  {
+    switch (value)
+    {
+    case SubpassContents::eInline: return "Inline";
+    case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DynamicState value)
+  {
+    switch (value)
+    {
+    case DynamicState::eViewport: return "Viewport";
+    case DynamicState::eScissor: return "Scissor";
+    case DynamicState::eLineWidth: return "LineWidth";
+    case DynamicState::eDepthBias: return "DepthBias";
+    case DynamicState::eBlendConstants: return "BlendConstants";
+    case DynamicState::eDepthBounds: return "DepthBounds";
+    case DynamicState::eStencilCompareMask: return "StencilCompareMask";
+    case DynamicState::eStencilWriteMask: return "StencilWriteMask";
+    case DynamicState::eStencilReference: return "StencilReference";
+    case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
+    case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateTypeKHR value)
+  {
+    switch (value)
+    {
+    case DescriptorUpdateTemplateTypeKHR::eDescriptorSet: return "DescriptorSet";
+    case DescriptorUpdateTemplateTypeKHR::ePushDescriptors: return "PushDescriptors";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectType value)
+  {
+    switch (value)
+    {
+    case ObjectType::eUnknown: return "Unknown";
+    case ObjectType::eInstance: return "Instance";
+    case ObjectType::ePhysicalDevice: return "PhysicalDevice";
+    case ObjectType::eDevice: return "Device";
+    case ObjectType::eQueue: return "Queue";
+    case ObjectType::eSemaphore: return "Semaphore";
+    case ObjectType::eCommandBuffer: return "CommandBuffer";
+    case ObjectType::eFence: return "Fence";
+    case ObjectType::eDeviceMemory: return "DeviceMemory";
+    case ObjectType::eBuffer: return "Buffer";
+    case ObjectType::eImage: return "Image";
+    case ObjectType::eEvent: return "Event";
+    case ObjectType::eQueryPool: return "QueryPool";
+    case ObjectType::eBufferView: return "BufferView";
+    case ObjectType::eImageView: return "ImageView";
+    case ObjectType::eShaderModule: return "ShaderModule";
+    case ObjectType::ePipelineCache: return "PipelineCache";
+    case ObjectType::ePipelineLayout: return "PipelineLayout";
+    case ObjectType::eRenderPass: return "RenderPass";
+    case ObjectType::ePipeline: return "Pipeline";
+    case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout";
+    case ObjectType::eSampler: return "Sampler";
+    case ObjectType::eDescriptorPool: return "DescriptorPool";
+    case ObjectType::eDescriptorSet: return "DescriptorSet";
+    case ObjectType::eFramebuffer: return "Framebuffer";
+    case ObjectType::eCommandPool: return "CommandPool";
+    case ObjectType::eSurfaceKHR: return "SurfaceKHR";
+    case ObjectType::eSwapchainKHR: return "SwapchainKHR";
+    case ObjectType::eDisplayKHR: return "DisplayKHR";
+    case ObjectType::eDisplayModeKHR: return "DisplayModeKHR";
+    case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
+    case ObjectType::eDescriptorUpdateTemplateKHR: return "DescriptorUpdateTemplateKHR";
+    case ObjectType::eObjectTableNVX: return "ObjectTableNVX";
+    case ObjectType::eIndirectCommandsLayoutNVX: return "IndirectCommandsLayoutNVX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
+  {
+    switch (value)
+    {
+    case QueueFlagBits::eGraphics: return "Graphics";
+    case QueueFlagBits::eCompute: return "Compute";
+    case QueueFlagBits::eTransfer: return "Transfer";
+    case QueueFlagBits::eSparseBinding: return "SparseBinding";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & QueueFlagBits::eGraphics) result += "Graphics | ";
+    if (value & QueueFlagBits::eCompute) result += "Compute | ";
+    if (value & QueueFlagBits::eTransfer) result += "Transfer | ";
+    if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
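+
+  // Illustrative sketch (hedged, not generated output): the *FlagBits
+  // overloads above name a single bit, while the matching *Flags overloads
+  // join every set bit with " | " inside braces. A hypothetical caller
+  // logging a queue family's capabilities might do:
+  //
+  //   vk::QueueFlags flags = vk::QueueFlagBits::eGraphics
+  //                        | vk::QueueFlagBits::eCompute;
+  //   std::cout << vk::to_string(flags) << std::endl;  // "{Graphics | Compute}"
+  //
+  // (assumes <iostream> and the default VULKAN_HPP_NAMESPACE of vk)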
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
+  {
+    switch (value)
+    {
+    case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
+    case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
+    case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
+    case MemoryPropertyFlagBits::eHostCached: return "HostCached";
+    case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | ";
+    if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | ";
+    if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | ";
+    if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | ";
+    if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
+  {
+    switch (value)
+    {
+    case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
+    case MemoryHeapFlagBits::eMultiInstanceKHX: return "MultiInstanceKHX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | ";
+    if (value & MemoryHeapFlagBits::eMultiInstanceKHX) result += "MultiInstanceKHX | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
+  {
+    switch (value)
+    {
+    case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
+    case AccessFlagBits::eIndexRead: return "IndexRead";
+    case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
+    case AccessFlagBits::eUniformRead: return "UniformRead";
+    case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
+    case AccessFlagBits::eShaderRead: return "ShaderRead";
+    case AccessFlagBits::eShaderWrite: return "ShaderWrite";
+    case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
+    case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
+    case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
+    case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
+    case AccessFlagBits::eTransferRead: return "TransferRead";
+    case AccessFlagBits::eTransferWrite: return "TransferWrite";
+    case AccessFlagBits::eHostRead: return "HostRead";
+    case AccessFlagBits::eHostWrite: return "HostWrite";
+    case AccessFlagBits::eMemoryRead: return "MemoryRead";
+    case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
+    case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
+    case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
+    case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | ";
+    if (value & AccessFlagBits::eIndexRead) result += "IndexRead | ";
+    if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | ";
+    if (value & AccessFlagBits::eUniformRead) result += "UniformRead | ";
+    if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | ";
+    if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | ";
+    if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | ";
+    if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | ";
+    if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | ";
+    if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | ";
+    if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | ";
+    if (value & AccessFlagBits::eTransferRead) result += "TransferRead | ";
+    if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | ";
+    if (value & AccessFlagBits::eHostRead) result += "HostRead | ";
+    if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
+    if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
+    if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
+    if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
+    if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
+    if (value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT) result += "ColorAttachmentReadNoncoherentEXT | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
+  {
+    switch (value)
+    {
+    case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
+    case BufferUsageFlagBits::eTransferDst: return "TransferDst";
+    case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+    case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+    case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
+    case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
+    case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
+    case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
+    case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
+    if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | ";
+    if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
+    if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
+    if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | ";
+    if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | ";
+    if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | ";
+    if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | ";
+    if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
+    case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
+    case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
+    if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
+    if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
+  {
+    switch (value)
+    {
+    case ShaderStageFlagBits::eVertex: return "Vertex";
+    case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
+    case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
+    case ShaderStageFlagBits::eGeometry: return "Geometry";
+    case ShaderStageFlagBits::eFragment: return "Fragment";
+    case ShaderStageFlagBits::eCompute: return "Compute";
+    case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
+    case ShaderStageFlagBits::eAll: return "All";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ShaderStageFlagBits::eVertex) result += "Vertex | ";
+    if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | ";
+    if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | ";
+    if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | ";
+    if (value & ShaderStageFlagBits::eFragment) result += "Fragment | ";
+    if (value & ShaderStageFlagBits::eCompute) result += "Compute | ";
+    if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | ";
+    if (value & ShaderStageFlagBits::eAll) result += "All | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
+  {
+    switch (value)
+    {
+    case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
+    case ImageUsageFlagBits::eTransferDst: return "TransferDst";
+    case ImageUsageFlagBits::eSampled: return "Sampled";
+    case ImageUsageFlagBits::eStorage: return "Storage";
+    case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
+    case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+    case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
+    case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
+    if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | ";
+    if (value & ImageUsageFlagBits::eSampled) result += "Sampled | ";
+    if (value & ImageUsageFlagBits::eStorage) result += "Storage | ";
+    if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | ";
+    if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
+    if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | ";
+    if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
+    case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
+    case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
+    case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
+    case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
+    case ImageCreateFlagBits::eBindSfrKHX: return "BindSfrKHX";
+    case ImageCreateFlagBits::e2DArrayCompatibleKHR: return "2DArrayCompatibleKHR";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
+    if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
+    if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
+    if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | ";
+    if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | ";
+    if (value & ImageCreateFlagBits::eBindSfrKHX) result += "BindSfrKHX | ";
+    if (value & ImageCreateFlagBits::e2DArrayCompatibleKHR) result += "2DArrayCompatibleKHR | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
+    case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
+    case PipelineCreateFlagBits::eDerivative: return "Derivative";
+    case PipelineCreateFlagBits::eViewIndexFromDeviceIndexKHX: return "ViewIndexFromDeviceIndexKHX";
+    case PipelineCreateFlagBits::eDispatchBaseKHX: return "DispatchBaseKHX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | ";
+    if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | ";
+    if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | ";
+    if (value & PipelineCreateFlagBits::eViewIndexFromDeviceIndexKHX) result += "ViewIndexFromDeviceIndexKHX | ";
+    if (value & PipelineCreateFlagBits::eDispatchBaseKHX) result += "DispatchBaseKHX | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
+  {
+    switch (value)
+    {
+    case ColorComponentFlagBits::eR: return "R";
+    case ColorComponentFlagBits::eG: return "G";
+    case ColorComponentFlagBits::eB: return "B";
+    case ColorComponentFlagBits::eA: return "A";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ColorComponentFlagBits::eR) result += "R | ";
+    if (value & ColorComponentFlagBits::eG) result += "G | ";
+    if (value & ColorComponentFlagBits::eB) result += "B | ";
+    if (value & ColorComponentFlagBits::eA) result += "A | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case FenceCreateFlagBits::eSignaled: return "Signaled";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
+  {
+    switch (value)
+    {
+    case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
+    case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
+    case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
+    case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+    case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+    case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+    case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
+    case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
+    case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
+    case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+    case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
+    case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
+    case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+    case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
+    case FormatFeatureFlagBits::eTransferSrcKHR: return "TransferSrcKHR";
+    case FormatFeatureFlagBits::eTransferDstKHR: return "TransferDstKHR";
+    case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT: return "SampledImageFilterMinmaxEXT";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | ";
+    if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | ";
+    if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | ";
+    if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
+    if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
+    if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | ";
+    if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | ";
+    if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | ";
+    if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | ";
+    if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
+    if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | ";
+    if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | ";
+    if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | ";
+    if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | ";
+    if (value & FormatFeatureFlagBits::eTransferSrcKHR) result += "TransferSrcKHR | ";
+    if (value & FormatFeatureFlagBits::eTransferDstKHR) result += "TransferDstKHR | ";
+    if (value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) result += "SampledImageFilterMinmaxEXT | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
+  {
+    switch (value)
+    {
+    case QueryControlFlagBits::ePrecise: return "Precise";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & QueryControlFlagBits::ePrecise) result += "Precise | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
+  {
+    switch (value)
+    {
+    case QueryResultFlagBits::e64: return "64";
+    case QueryResultFlagBits::eWait: return "Wait";
+    case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
+    case QueryResultFlagBits::ePartial: return "Partial";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & QueryResultFlagBits::e64) result += "64 | ";
+    if (value & QueryResultFlagBits::eWait) result += "Wait | ";
+    if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | ";
+    if (value & QueryResultFlagBits::ePartial) result += "Partial | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
+  {
+    switch (value)
+    {
+    case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
+    case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
+    case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | ";
+    if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | ";
+    if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
+  {
+    switch (value)
+    {
+    case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
+    case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
+    case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
+    case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
+    case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
+    case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
+    case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
+    case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
+    case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
+    case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
+    case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | ";
+    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | ";
+    if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | ";
+    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | ";
+    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | ";
+    if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | ";
+    if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | ";
+    if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | ";
+    if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | ";
+    if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | ";
+    if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
+  {
+    switch (value)
+    {
+    case ImageAspectFlagBits::eColor: return "Color";
+    case ImageAspectFlagBits::eDepth: return "Depth";
+    case ImageAspectFlagBits::eStencil: return "Stencil";
+    case ImageAspectFlagBits::eMetadata: return "Metadata";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ImageAspectFlagBits::eColor) result += "Color | ";
+    if (value & ImageAspectFlagBits::eDepth) result += "Depth | ";
+    if (value & ImageAspectFlagBits::eStencil) result += "Stencil | ";
+    if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
+  {
+    switch (value)
+    {
+    case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
+    case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
+    case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | ";
+    if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | ";
+    if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
+  {
+    switch (value)
+    {
+    case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
+  {
+    switch (value)
+    {
+    case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
+    case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
+    case PipelineStageFlagBits::eVertexInput: return "VertexInput";
+    case PipelineStageFlagBits::eVertexShader: return "VertexShader";
+    case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
+    case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+    case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
+    case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
+    case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
+    case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
+    case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
+    case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
+    case PipelineStageFlagBits::eTransfer: return "Transfer";
+    case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
+    case PipelineStageFlagBits::eHost: return "Host";
+    case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
+    case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+    case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | ";
+    if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | ";
+    if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | ";
+    if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | ";
+    if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | ";
+    if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | ";
+    if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | ";
+    if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | ";
+    if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | ";
+    if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | ";
+    if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | ";
+    if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | ";
+    if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | ";
+    if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | ";
+    if (value & PipelineStageFlagBits::eHost) result += "Host | ";
+    if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
+    if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
+    if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case CommandPoolCreateFlagBits::eTransient: return "Transient";
+    case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | ";
+    if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
+  {
+    switch (value)
+    {
+    case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
+  {
+    switch (value)
+    {
+    case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
+  {
+    switch (value)
+    {
+    case SampleCountFlagBits::e1: return "1";
+    case SampleCountFlagBits::e2: return "2";
+    case SampleCountFlagBits::e4: return "4";
+    case SampleCountFlagBits::e8: return "8";
+    case SampleCountFlagBits::e16: return "16";
+    case SampleCountFlagBits::e32: return "32";
+    case SampleCountFlagBits::e64: return "64";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SampleCountFlagBits::e1) result += "1 | ";
+    if (value & SampleCountFlagBits::e2) result += "2 | ";
+    if (value & SampleCountFlagBits::e4) result += "4 | ";
+    if (value & SampleCountFlagBits::e8) result += "8 | ";
+    if (value & SampleCountFlagBits::e16) result += "16 | ";
+    if (value & SampleCountFlagBits::e32) result += "32 | ";
+    if (value & SampleCountFlagBits::e64) result += "64 | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
+  {
+    switch (value)
+    {
+    case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
+  {
+    switch (value)
+    {
+    case StencilFaceFlagBits::eFront: return "Front";
+    case StencilFaceFlagBits::eBack: return "Back";
+    case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & StencilFaceFlagBits::eFront) result += "Front | ";
+    if (value & StencilFaceFlagBits::eBack) result += "Back | ";
+    if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
+  {
+    switch (value)
+    {
+    case DependencyFlagBits::eByRegion: return "ByRegion";
+    case DependencyFlagBits::eViewLocalKHX: return "ViewLocalKHX";
+    case DependencyFlagBits::eDeviceGroupKHX: return "DeviceGroupKHX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & DependencyFlagBits::eByRegion) result += "ByRegion | ";
+    if (value & DependencyFlagBits::eViewLocalKHX) result += "ViewLocalKHX | ";
+    if (value & DependencyFlagBits::eDeviceGroupKHX) result += "DeviceGroupKHX | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
+  {
+    switch (value)
+    {
+    case PresentModeKHR::eImmediate: return "Immediate";
+    case PresentModeKHR::eMailbox: return "Mailbox";
+    case PresentModeKHR::eFifo: return "Fifo";
+    case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
+    case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
+    case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
+  {
+    switch (value)
+    {
+    case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
+    case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
+    case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
+    case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT";
+    case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
+    case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
+    case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
+    case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
+    case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
+    case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
+    case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
+    case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
+    case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
+    case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
+    case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
+    case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
+    case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
+    case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
+    if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | ";
+    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | ";
+    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
+    case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
+    case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
+    case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
+    if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | ";
+    if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | ";
+    if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
+    case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
+    case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
+    case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
+    case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
+    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
+    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
+    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
+    case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | ";
+    if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | ";
+    if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | ";
+    if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | ";
+    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | ";
+    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | ";
+    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | ";
+    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | ";
+    if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
+  {
+    switch (value)
+    {
+    case DebugReportFlagBitsEXT::eInformation: return "Information";
+    case DebugReportFlagBitsEXT::eWarning: return "Warning";
+    case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
+    case DebugReportFlagBitsEXT::eError: return "Error";
+    case DebugReportFlagBitsEXT::eDebug: return "Debug";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | ";
+    if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | ";
+    if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | ";
+    if (value & DebugReportFlagBitsEXT::eError) result += "Error | ";
+    if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
+  {
+    switch (value)
+    {
+    case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
+    case DebugReportObjectTypeEXT::eInstance: return "Instance";
+    case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
+    case DebugReportObjectTypeEXT::eDevice: return "Device";
+    case DebugReportObjectTypeEXT::eQueue: return "Queue";
+    case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
+    case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
+    case DebugReportObjectTypeEXT::eFence: return "Fence";
+    case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
+    case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
+    case DebugReportObjectTypeEXT::eImage: return "Image";
+    case DebugReportObjectTypeEXT::eEvent: return "Event";
+    case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
+    case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
+    case DebugReportObjectTypeEXT::eImageView: return "ImageView";
+    case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
+    case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
+    case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
+    case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
+    case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
+    case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
+    case DebugReportObjectTypeEXT::eSampler: return "Sampler";
+    case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
+    case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
+    case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
+    case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
+    case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
+    case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
+    case DebugReportObjectTypeEXT::eDebugReportCallbackExt: return "DebugReportCallbackExt";
+    case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
+    case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
+    case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
+    case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
+    case DebugReportObjectTypeEXT::eDescriptorUpdateTemplateKHR: return "DescriptorUpdateTemplateKHR";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
+  {
+    switch (value)
+    {
+    case RasterizationOrderAMD::eStrict: return "Strict";
+    case RasterizationOrderAMD::eRelaxed: return "Relaxed";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
+  {
+    switch (value)
+    {
+    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
+    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
+    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
+  {
+    switch (value)
+    {
+    case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
+    case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
+    case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | ";
+    if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | ";
+    if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
+  {
+    switch (value)
+    {
+    case ValidationCheckEXT::eAll: return "All";
+    case ValidationCheckEXT::eShaders: return "Shaders";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
+  {
+    switch (value)
+    {
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
+    case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
+    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
+  {
+    switch (value)
+    {
+    case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
+    case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
+    if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
+  {
+    switch (value)
+    {
+    case IndirectCommandsTokenTypeNVX::ePipeline: return "Pipeline";
+    case IndirectCommandsTokenTypeNVX::eDescriptorSet: return "DescriptorSet";
+    case IndirectCommandsTokenTypeNVX::eIndexBuffer: return "IndexBuffer";
+    case IndirectCommandsTokenTypeNVX::eVertexBuffer: return "VertexBuffer";
+    case IndirectCommandsTokenTypeNVX::ePushConstant: return "PushConstant";
+    case IndirectCommandsTokenTypeNVX::eDrawIndexed: return "DrawIndexed";
+    case IndirectCommandsTokenTypeNVX::eDraw: return "Draw";
+    case IndirectCommandsTokenTypeNVX::eDispatch: return "Dispatch";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
+  {
+    switch (value)
+    {
+    case ObjectEntryTypeNVX::eDescriptorSet: return "DescriptorSet";
+    case ObjectEntryTypeNVX::ePipeline: return "Pipeline";
+    case ObjectEntryTypeNVX::eIndexBuffer: return "IndexBuffer";
+    case ObjectEntryTypeNVX::eVertexBuffer: return "VertexBuffer";
+    case ObjectEntryTypeNVX::ePushConstant: return "PushConstant";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits value)
+  {
+    switch (value)
+    {
+    case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) result += "PushDescriptorKHR | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd: return "OpaqueFd";
+    case ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32: return "OpaqueWin32";
+    case ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+    case ExternalMemoryHandleTypeFlagBitsKHR::eD3D11Texture: return "D3D11Texture";
+    case ExternalMemoryHandleTypeFlagBitsKHR::eD3D11TextureKmt: return "D3D11TextureKmt";
+    case ExternalMemoryHandleTypeFlagBitsKHR::eD3D12Heap: return "D3D12Heap";
+    case ExternalMemoryHandleTypeFlagBitsKHR::eD3D12Resource: return "D3D12Resource";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueFd) result += "OpaqueFd | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32) result += "OpaqueWin32 | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eD3D11Texture) result += "D3D11Texture | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eD3D11TextureKmt) result += "D3D11TextureKmt | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eD3D12Heap) result += "D3D12Heap | ";
+    if (value & ExternalMemoryHandleTypeFlagBitsKHR::eD3D12Resource) result += "D3D12Resource | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case ExternalMemoryFeatureFlagBitsKHR::eDedicatedOnly: return "DedicatedOnly";
+    case ExternalMemoryFeatureFlagBitsKHR::eExportable: return "Exportable";
+    case ExternalMemoryFeatureFlagBitsKHR::eImportable: return "Importable";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalMemoryFeatureFlagBitsKHR::eDedicatedOnly) result += "DedicatedOnly | ";
+    if (value & ExternalMemoryFeatureFlagBitsKHR::eExportable) result += "Exportable | ";
+    if (value & ExternalMemoryFeatureFlagBitsKHR::eImportable) result += "Importable | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd: return "OpaqueFd";
+    case ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueWin32: return "OpaqueWin32";
+    case ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+    case ExternalSemaphoreHandleTypeFlagBitsKHR::eD3D12Fence: return "D3D12Fence";
+    case ExternalSemaphoreHandleTypeFlagBitsKHR::eSyncFd: return "SyncFd";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueFd) result += "OpaqueFd | ";
+    if (value & ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueWin32) result += "OpaqueWin32 | ";
+    if (value & ExternalSemaphoreHandleTypeFlagBitsKHR::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+    if (value & ExternalSemaphoreHandleTypeFlagBitsKHR::eD3D12Fence) result += "D3D12Fence | ";
+    if (value & ExternalSemaphoreHandleTypeFlagBitsKHR::eSyncFd) result += "SyncFd | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case ExternalSemaphoreFeatureFlagBitsKHR::eExportable: return "Exportable";
+    case ExternalSemaphoreFeatureFlagBitsKHR::eImportable: return "Importable";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalSemaphoreFeatureFlagBitsKHR::eExportable) result += "Exportable | ";
+    if (value & ExternalSemaphoreFeatureFlagBitsKHR::eImportable) result += "Importable | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case SemaphoreImportFlagBitsKHR::eTemporary: return "Temporary";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SemaphoreImportFlagBitsKHR::eTemporary) result += "Temporary | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd: return "OpaqueFd";
+    case ExternalFenceHandleTypeFlagBitsKHR::eOpaqueWin32: return "OpaqueWin32";
+    case ExternalFenceHandleTypeFlagBitsKHR::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+    case ExternalFenceHandleTypeFlagBitsKHR::eSyncFd: return "SyncFd";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalFenceHandleTypeFlagBitsKHR::eOpaqueFd) result += "OpaqueFd | ";
+    if (value & ExternalFenceHandleTypeFlagBitsKHR::eOpaqueWin32) result += "OpaqueWin32 | ";
+    if (value & ExternalFenceHandleTypeFlagBitsKHR::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
+    if (value & ExternalFenceHandleTypeFlagBitsKHR::eSyncFd) result += "SyncFd | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case ExternalFenceFeatureFlagBitsKHR::eExportable: return "Exportable";
+    case ExternalFenceFeatureFlagBitsKHR::eImportable: return "Importable";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & ExternalFenceFeatureFlagBitsKHR::eExportable) result += "Exportable | ";
+    if (value & ExternalFenceFeatureFlagBitsKHR::eImportable) result += "Importable | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FenceImportFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case FenceImportFlagBitsKHR::eTemporary: return "Temporary";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(FenceImportFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & FenceImportFlagBitsKHR::eTemporary) result += "Temporary | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value)
+  {
+    switch (value)
+    {
+    case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SurfaceCounterFlagBitsEXT::eVblank) result += "Vblank | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value)
+  {
+    switch (value)
+    {
+    case DisplayPowerStateEXT::eOff: return "Off";
+    case DisplayPowerStateEXT::eSuspend: return "Suspend";
+    case DisplayPowerStateEXT::eOn: return "On";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value)
+  {
+    switch (value)
+    {
+    case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value)
+  {
+    switch (value)
+    {
+    case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlagBitsKHX value)
+  {
+    switch (value)
+    {
+    case PeerMemoryFeatureFlagBitsKHX::eCopySrc: return "CopySrc";
+    case PeerMemoryFeatureFlagBitsKHX::eCopyDst: return "CopyDst";
+    case PeerMemoryFeatureFlagBitsKHX::eGenericSrc: return "GenericSrc";
+    case PeerMemoryFeatureFlagBitsKHX::eGenericDst: return "GenericDst";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlagsKHX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & PeerMemoryFeatureFlagBitsKHX::eCopySrc) result += "CopySrc | ";
+    if (value & PeerMemoryFeatureFlagBitsKHX::eCopyDst) result += "CopyDst | ";
+    if (value & PeerMemoryFeatureFlagBitsKHX::eGenericSrc) result += "GenericSrc | ";
+    if (value & PeerMemoryFeatureFlagBitsKHX::eGenericDst) result += "GenericDst | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlagBitsKHX value)
+  {
+    switch (value)
+    {
+    case MemoryAllocateFlagBitsKHX::eDeviceMask: return "DeviceMask";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlagsKHX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & MemoryAllocateFlagBitsKHX::eDeviceMask) result += "DeviceMask | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagBitsKHX value)
+  {
+    switch (value)
+    {
+    case DeviceGroupPresentModeFlagBitsKHX::eLocal: return "Local";
+    case DeviceGroupPresentModeFlagBitsKHX::eRemote: return "Remote";
+    case DeviceGroupPresentModeFlagBitsKHX::eSum: return "Sum";
+    case DeviceGroupPresentModeFlagBitsKHX::eLocalMultiDevice: return "LocalMultiDevice";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagsKHX value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & DeviceGroupPresentModeFlagBitsKHX::eLocal) result += "Local | ";
+    if (value & DeviceGroupPresentModeFlagBitsKHX::eRemote) result += "Remote | ";
+    if (value & DeviceGroupPresentModeFlagBitsKHX::eSum) result += "Sum | ";
+    if (value & DeviceGroupPresentModeFlagBitsKHX::eLocalMultiDevice) result += "LocalMultiDevice | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR value)
+  {
+    switch (value)
+    {
+    case SwapchainCreateFlagBitsKHR::eBindSfrKHX: return "BindSfrKHX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SwapchainCreateFlagBitsKHR::eBindSfrKHX) result += "BindSfrKHX | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(ViewportCoordinateSwizzleNV value)
+  {
+    switch (value)
+    {
+    case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
+    case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
+    case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
+    case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
+    case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
+    case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
+    case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
+    case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(DiscardRectangleModeEXT value)
+  {
+    switch (value)
+    {
+    case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
+    case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits value)
+  {
+    switch (value)
+    {
+    case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
+    case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags value)
+  {
+    if (!value) return "{}";
+    std::string result;
+    if (value & SubpassDescriptionFlagBits::ePerViewAttributesNVX) result += "PerViewAttributesNVX | ";
+    if (value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) result += "PerViewPositionXOnlyNVX | ";
+    return "{" + result.substr(0, result.size() - 3) + "}";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(SamplerReductionModeEXT value)
+  {
+    switch (value)
+    {
+    case SamplerReductionModeEXT::eWeightedAverage: return "WeightedAverage";
+    case SamplerReductionModeEXT::eMin: return "Min";
+    case SamplerReductionModeEXT::eMax: return "Max";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(BlendOverlapEXT value)
+  {
+    switch (value)
+    {
+    case BlendOverlapEXT::eUncorrelated: return "Uncorrelated";
+    case BlendOverlapEXT::eDisjoint: return "Disjoint";
+    case BlendOverlapEXT::eConjoint: return "Conjoint";
+    default: return "invalid";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string(CoverageModulationModeNV value)
+  {
+    switch (value)
+    {
+    case CoverageModulationModeNV::eNone: return "None";
+    case CoverageModulationModeNV::eRgb: return "Rgb";
+    case CoverageModulationModeNV::eAlpha: return "Alpha";
+    case CoverageModulationModeNV::eRgba: return "Rgba";
+    default: return "invalid";
+    }
+  }
+
+} // namespace vk
+
+#endif
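
A minimal usage sketch of the to_string helpers added above (an illustration under the assumption that the header is included as <vulkan/vulkan.hpp>, not part of the patch itself): single-bit enum values map to their bare name, while flag masks are rendered as a brace-enclosed, "|"-separated list of the set bits.

    // Usage sketch only; assumes vulkan.hpp is on the include path.
    #include <iostream>
    #include <vulkan/vulkan.hpp>

    int main() {
      // Combining flag bits yields a vk::PipelineStageFlags mask.
      vk::PipelineStageFlags stages =
          vk::PipelineStageFlagBits::eVertexShader |
          vk::PipelineStageFlagBits::eFragmentShader;

      // Single-bit enum: prints "Color".
      std::cout << vk::to_string(vk::ImageAspectFlagBits::eColor) << std::endl;

      // Flag mask: prints "{VertexShader | FragmentShader}" (order follows
      // the if-chain in the generated to_string overload).
      std::cout << vk::to_string(stages) << std::endl;
      return 0;
    }
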
diff --git a/lib/SDL2.lib b/lib/SDL2.lib
new file mode 100755
index 0000000000000000000000000000000000000000..12caaa794246d61b53a2fc16e902bf07fb616d98
GIT binary patch
literal 120720
zcmeHwd7NBDwSU#HBO(GKB4AhqM3#^R0wR)R5)zn%ab}VLqM>J|lQi^958XXUKtx1D
zL_|bH+z?R_5fKql5fKp)5fu@U$Ir*}x$qDXQ4tXlf8SH<sk-+T({m^9uQ#8Z^u6bt
zs&DPLmQz)`9bRb;tvO=L1E+<52hE;7W6mLmPM<y}<WD!RGY*}8h<-k4OCdHsRfykQ
zCB*OU5aN+%gq%^=bjLM95`Alqrthy3lIYt{Xu7ReNTR!@Yx;f{<e;0+({%fQkcw``
zFX(5N8iM}MKzAOl>3dBf6+M7oqOCRkVm~2?9=OxcN%#f*XpyFSek~-?&-c-E&lz|p
z+ELR_779sp-{zWrbS>P0euyv={rFd!eu6j>eRrCspFM~0f$jhf(fxO5`tI3~gYHKd
zh;Bhx6}9mTx)<>v`sv}C?uDG_r&nsa7x5zc;f<Ppd^+$!cR#A>$J^i?boV)$e*P@t
z1^U5pntpyU-a$7%rs<nKLMplrzo1*-mgrj_H}pJyLAMSYLOR|G`u0AC&c`q4yN_tP
z^9tk#===9*y6Y0;E78-MesC+o0lIsdru*thSE3(l`VP`Y5z_xVpxY2nqVFKQM3?<V
z(>dD;N%Z9tHC=R}kVGHaPt%?VAJNVT2hlD^Xu1e_0uoPbrRnkegdloiy{11LAOz7L
z57YFxfE@JLotpk|DdeC(Ua9HvHIT0n^23WX?R~Y7Ao1igisU9p7m)b-HHzej5Q?tb
zSV*GlchV%0-XQVpfFjw|7K$!8K}ezxUZClc+wczh;9iEF!tYZ;JpG^|DQCc+8AALO
z@lk|${*~xgnx0xH1krN{A4neYgr+ZEE+o+rC^tk`0F&sZO*L(FvyepBjv7LlycYDu
z!!*79GeRmtI=>xs)>fL%yiZ7wc<dxaato9LqIb^IbT-Nw(OC~`I(seNLGS3+boMXt
z4tmEmnr_}5<regHlm()%-Jt0keL@m_eJ4%dKv;>sj`Bpb|MQx5y+}xqc;eTZ{&chu
zAi4Xnrqh~2677$&M6}CwnofHNe9*2aKZ<t6FX(jSyQ1sy3p(X?O<Ud}BuM<}22D@h
zh;&CeUN>myF8qSthI}UycqW>Oc!I>A?@}b|`wC4L;}`U$BMm|COVInv!G<o!@8v>X
zy1Ay^P<M!y|60>Dq$km<?$dPKlR^?LKV8#tC|^X&kQPM8FE@lVJRWqyG(+d&m*{a#
z%MfRx<=bmI9C08z2<b%hR_GF~-9eG~{RM(ZZn38#@%yLo{<M%Eou%m`cM3_g1?hms
zdkg_%95ibuLucamOw_Sm70Kg}7epsMujx9JQ=&}}KcY9EtVlewz0ia@^9<41n*LE2
z0wlLQLDMI06_RLVji&3*K|KYXjCc@Tyj0WW2M9@Y=~_*@J&SY)eFAc#!;jPSarjO2
zI_MF7{Fj<u_XyrWAG%!AwCj*h*9q~r^9><={zim&g5;(*Xxa>UOZ1iVHMQX<(HHj9
z^p(GW4{ATGY5J8y63rEwZahgyMaZigL3{LQnm!$RpuNB&diB|cp2RO`Z{!ov0i&As
zIuUxHS8uIp_XkmrK)ao*Y2GbD5?zU~63vI-L`R{#fW*HBH9bF12#{QGoThI0M>HQ{
zCb}BwL$v5#O`lmPB+;j?)ikdcIH0SLmPALLtLf?)a0fc-X-#vH2SlH}RMSGlh3K;=
zk3<V@*L20*X#YT;TdwJvJA@>96Vim}thT1LNPk7}e=X<(NEf0j!6#ZYsObh^D7p&2
zpid(VMBR^T+6TWxpE^v_T;v<kRqHh^gd3u(HrMo-8xVicmCtGFJ_G)MKK-Dkjh3Rl
z2EFB0O`B|qdH~w^qnciFo{&UaU#)3dJQHoz)U*xCha#l+HlVE@(exsu8_`Ri()8jx
zg;X?vU(nWbG`;i)A&FkHqo$W$hj-9c8*6&)n2?GP$Jc`P>(TU)6H)&``(CE$&>Mv$
zIt-YKkVl7sUc0%befNVN=nIe&t^ByAYrF7$0$q;y61}NU(_4;4dVu!3Nz?wjqn!us
zbGoM4%Y`Jm>6eB^@eBGW(g`G<yik#x^;@(*zr{EGNKH?pA3*fas}#vge}(n{w8w3l
z_Phf95zywhX!`t_LK2;Zco41LUQ_KzA&Dv{X<BuSkVHclYZ~5ENJS_s!$fUOmHVL&
zs@|h%^||nm=qXKW?h{hcAbvqL_(^oqlbQw>3Q1H2F45qPnud@zL?>-!Xgz*G+toE~
z`!vc8Xs7cGZI55jn~{D*uS9uL^ca3Y+anxAuk6zFvK}FccDz#4_9sFP+W7=cJIoQ1
z=oLF@+TkX=gI;!rrX3MBqE}q1>E+XfRCFzVLE8b7=)hi0Z@5iJq5};bxu1|k@4QOW
z2OdKG1zijbqLYzFL}v_Ydh4$Eo`F`tKSj`40on{0M4R_%dh4^$1AP_YQFIx8L1)~p
z>BFcCMEh*5NIcpj6#X0RK1gnMmZnXQMtcuB=L}7+T7&O5Xy%cc)?Xnc(J>EdTJpS*
zM921MItFEzX#FfrOA$AsRflU@x(e^0#n)(BvJ>1BAwP+Fr)lazSc#54S5pu0i3Z>o
zQQsm>0|M`$qfrJFJ&RvZ|2~@fkXA&;8X7>@i25(l^v3&zB<e*PD0&9Jpf@6nM3<dq
zXea!FUUPz>+wglE((vn={*7-Y(VtIJBu{}J(aBG0y6yq=4?s=uh+0SwqOpaVPJK*B
zqEYaOT32goY%V0x*bSQ6+u-{OY9J0oqnB$sbw{`-LU@T9h!ausZcU$m1oa<u4ay7A
z41|T~fQL05e4UUW@vnO{{o_&ePeA`f-yS3nxzG^O<`B>vqzy<sw}T<{Yo0qBeM{&P
zeG+jYI%0cGpF|vpUJn`3C)aCw{grqJefcg;U)dS`2++uVnyyFs5nZ&CrjMbl5gjs3
z(}nj4N%Y>Onl3yJ@1XM^()9joge1BEb&lwLk7{}k;y`r4$2GnG2<U;{i+m;e7~)HG
zFnB~C13uC0yEVP5Pe`J7uQ7zQd^hM^xKZ>henIDT8@e99p!e)*=zRQw-gS$nbMcKK
zdf(}Ww#F~0>(_=h#V_bZ`)b-2VIlhbmYOz(TcYz39*}tcdPVXLC!qZS9ki{XbMXrj
z!-~Z3_d<VoFVuCU3(=96X`1s8@)vYyx26-Pqn}H3v8FRuVVni}T92kPH^%$M7)#!+
zNUnq~(dX{e^g76i79Xxi{CgVe%e0j%7cc2Owr^l%_k#ZUeT#bsj_q67-`z7qtX#Qt
zSx-+^D&9_FEgV?1Z0Rv8yXOwf6=1GBs@m?YSK6x@%@H`Sjf{~`0hiuKD)o9}h$$Eb
z$%bN?VUVm@jfIv&(pAlBmHDY&iNCtqu8yu{62l-_E0!4s$xdxGnr-Gxzmudg*kXKo
zBW|rVTw7h^E-Xh&TVs_W9>7{lb4c25jE{{~o6J|s(b9TjT_n{Ul8#hI5NhVsFpO+y
z?6gou3?mz9tmPqsL&dQ!N5&$TV32IxP^(@YWirFCvJjGl7E?V}cwjAu+_u`y&|kx_
zvd~`=T7SpetLE5uU+y^9`4ONiH{clQ@Hme#gv2niW|fyA$cSNN<6}JQAR~rFTo=je
zU>MoZnn*?ri<c2%#@0!J!O|aZ#2}ZocB8^1>XrE8kwCwbq#n9ZugXOv7<ZC-B+>8M
zO(YukvP!#AV<A(o<OUC`LjEe72JJ?jNf1S1kQ?<lo)LoMr1gvx94AG%VxNiS<ioLa
z*vf{2T7iHN!?+vG2o=XjhbpaVM!_<j4|z(XfmUm%GRjk|r5WVITJ!AdfaNIvN5`7A
z(RSe0vb1aa#@$;UX;g+9LDNUCir=d7_Mk$qir=mTe0sI~87zq_5X?}Lp=yoijCv&(
zLyfW1TD8^OH^<U)9xxj&{PNhUSJGY;HsR`3@#~ESuUM<pJ4xz|)hxX9M%=N+I>x0p
z;vzrlgFK{$p=3cq*>u7$52t!n+NTDepccJae&7HCU2DPwx9N>vuC89mpW0AkbnVK<
zSkRg?rq>y&H(LB#p_Me(X|c3WaurssiHcGi<w+Q@$%p=Gdr74>+E=X%^DkDwCcVDu
z>RPK^ZO$FzjWy?z*0RxNu3pI3{*B>7KeyGYHrq9H=Y84ltA-L2N~gN0GCEujzRL(~
zMoO;GFY2zYs-OwYh&^0F$@r0WoRP+^bbMF@f#@6=A-U<RqC*i2d^mH(<Zf=eF;W{E
zXr9*77&?W)gnX}=tEQM<XW1yaidoML+w0Ar+n+%sp4UL$WHgB9HIN(`4dQu?xx@U!
zpyNe6uK~>7;M=T~4BP7gIZw~9y&mxM^bDKy;ABCw88lmtMRKUda>pY1{Cc(0j5QdO
zbYii7SFX_oZ&#O$x2x+}+RbNN(xOa_bv%zWOsR>L0ygRCjE~hgm$bU8Xm1*)#cBbY
z^mI7;t0R@MHH{{3-(m^#HBa66>_w!Hw-uFOL|{2air8fpER*>#P#dW>#(Cj19$A6W
z?QOIoCB-q)<(2w4k0SigERyT^&TowJ0?wGEV<H(Tajx<Se)Qu@UE$wxa$}fQXI`^W
z8Aiv3hnUnn-)cr_L`sUq<u)aJGiBARL}{yXIj{yl*?1Z@>7jz?x22U4o@61PG_~5|
zZdUjmEAS~$asGfyTBxwvRd2J_Xx7@%2QZS63rYaZX2^_5I^ETFWoS)CD`1n}Ku^EU
zY37yZ84UzE6cfY95d2U^43hP&SlDcgEUeXeA~Gh^>193`jb7%*k}983&<e!!J|Mqa
z?L0LnAl<n%_0YG1fmseo7h(<}5E_O;L77=LhdJ!i3@htdaL~b#RI#jlURJJHR^GE<
zChroH?}}yR^RjZqvhtn<2hYmjgJtFOvU0^D`3jV|Y7jZgandMyO77yP-l7P49`hrJ
zqVWSfrx8Wc_`x-zXc|9EJdJ*k5S~XUD78z+N9K+X*Baf;+S+Q94;ylX#tn18(ir+&
zK)ULE@o3QMd!EyHBgWs%Bc-8u25<g2<`+f--+jvl*c>^zJQh89<rnkY^Nimv{?)a$
zp=we5jnKGJo^~r<wn>bIK}fWtyzm$aN9b=?+T&4(l<!8sb+z+C7lD}E^;X7P)i`k&
zGt|ih+iS9L_uvz@V(xOZb1Z_4Nq$lj0v)DcIV7DoUaJqQP|aUHz;r@BX`&fJan(hF
zDF$rPTT)ve$I`L5d?6-jgUK+fWdx-A#_{TSCe0X=bh@jj=4l0N(nG0LE_H1CYp3&X
zLXME!sBS28#p6xgRxToa>Qly%_q;$Olfh1p<*)8n#y)Z``N`fhzGh`QUKmZs-p;2X
zwq}Mp8Be_?3oE|4^Xs*-!A7MyJWyTFo0p7;{HIo=KtCfQ{Y8~lk$y&G{qJ2izcD__
zgF;?gF4ICkrP@;)t!9e_X$MjhmcH(R{`sstAPkyi<a6uo+VU!2Wq^id8l5FejthMt
zhLO$eUK&j9z_n%>d4K<c&@C~%jDO1E6hi%#)6X+APG>T=vkuCL!Sa63%6bl#k<VQo
z#fcbR7CD7byF|Y9pN46bk-oW07<#}aU%=?Ct*_Q~Ba!_U#6ohzcne0yM+O>8Dy>s^
zD_@{MZk6glBhp|YEKne~s%MvJ<~WzMs5Q<gF{bmWw^FaR+q~6MURgHjp`x*nT8aO1
z=gX~2?f?8ny%F?E$Wb7&q0ahG!M0q|T2dM7J8E8#T8iOib2CD>k{7x}#uI~F>PPO<
za!5MAQXd+}B4G8X%E(BiuM*6c=LpFSYQ^GF%;{l*Jeq@xC8X_C`=HLkAM#0a*(hf8
zA}!A&jRm7>4zss`iE&;eLw*Kz?&vTzS4H4fOz!m13Tf9r-dq(f0cJ#b`YUSfHJS1m
zyJSjve$Xh4g*A4q#Ch(#w=dvBq)2{IS<FV6b4d$@+bG#`8`PFaMlR?(o%NS<NlSls
zvw9(4`&SvBx1=}QhciqS%F5bmMyKC1m5z%^Ul0E@_!tu3XsSwnOf?$kJ1%LVPPuoV
zCY&3hD&-{&KCj`SozVDf62`FufLIvf%&m{DsTAQjVTj|{c{olO;>^QZ@Gx+ACG!#T
z(h$k>^Ps%cU{Q}IY9D~%xW;ey9{B0Fq-DC{*#Z{{vbx#2tvmY0D<fv^)E9=gRR+TF
zNfR?J^(b17#pT==tDsl(mXSTeym~N7^*r*Uhinoe4bLNu{>G|yf4f<253OO159gAW
z^R*w#W>sE%V@ZIT(RSEILfWV;80{{v4Oa_C`+?B7VLTRac*X^!tKOs0zUMiOXtb|1
zaz^{y_pqet*#It&V^?d9)hnm*{N!=BQsb&yzh`@IToCHcLqC2v^4@40QK;YDyEsfb
zqfGozb6Kl0Sg$7h4-|}B<f`$IT8JoIu`tAoniM1x^T~y++B&7(7~_&4M=<Ff%MFT|
znJILcBeyPT{MQOX6JV-QUnn;&SJXy_8|w_@;?dz?Ed~+Hk&{cbbb%yFnOt!mV&pCc
zRN}xL>m!GSL<fh9$(`=5Xc^NF*s)%2e?@!YdOI$l@!Y)DGjsD!i`?aF8c7hxh4sqn
zs9YL&B_Lgwzyq~*Fhd>*0)Jy&7d$Np{9Qa0tY<rK6+h7Ih6M&oTBoV_0e+<E;GL#(
zi?yd()SJ)B0>5)yIRBk22zNAJ;QUwo!0$-Y`J2_8*Juu7C{pBri-WK%sg2e~#z!J|
zE=XJ)gp3P<3a`rx7l(M2^+`M?4)K^E508lhJRR}i3+*E3WPuA<3e--g);+Iid7*N%
zrm;@Pm+N{a=`6x`Jfp*yq|;ZawX#Z_OIk}Pj+lYW_!bG2E@f-3ZYWUbXuq+JobeWM
zWqi_fDYp=IXRRTOmAK_eo|IhqQkxk|W<;dFY-|_<aUGm&*ECOB;EFnvyfw>ls#Sux
z9o4K1^6a$V8^sUe6KT50a7}t1tPW=q7s{b12*bQ)ZS|V=Xtl+I@6wk^gYYnZP8DI&
z0P9$o_+}Pwb$G2PK(Wjm2rCNw$CtE1GjDlm>STe7#iL{6?S+k@U{S@zo=F2&c{4Z8
zkrxJW@$8(~cwvCE2y57=8F|sMBGNY&2APZx`J}nH)vu}&4b7ViMycuH1BzTRxzno5
zEeldsrovQH-PU|3=!p?D7m}N!XwZaJ-P&NY(&R(`NMLkPu6vt}A=no+!_o`&NM>Bn
z<VJ!2p~&bjSP#?q7N*Gh$H&OjEl4b&<j9RnG-z1+Y{RH;5T-x8YQb47G;UCWoC}t|
za1cn0Hey(y9Y~C}?h9fd3_B!Vp5S>XGWts@r&LW(JPZmK_>dUYObLYd2u&!>xXKNc
zcV@-jU3asxPL-c%jZA?BLgNPM*T1eZrshA_26i_(aRZ^p3D~UGDx5%QaR8E+2Bd5F
zMzgz$x$>%wTHtpmQu@_4roz<1Roqk|nL<&dPs_i&cfs_v$mpYW%u#h*(n2D-cOPKR
z_1M(m$27y}P&hrM(F37zW0gr3PW!=4ER5V_hw0XVl0@2sQ0&4<eO$822q5<vug;lI
z7#~-sixh66L0{ii-xXG`A^~A9ZaKxdnDY<zp=4#`q95aN=O*x+^OYv7sca~sSukX0
zC?b7YlT)7!ThvT*$Uq8JbX+3m=(h6Soc-)L@0FZfA`zD~V3v?m>=~bH=9Xgkp!r7g
zsCr|tQdd6Y=x3zHRbLgWHpq!0cR6x$xuVh>Q%)9-X4;0VEO4Pi!jjqP8wqJ6%K~In
zQeNY8#pG^&Wvo5k)Uja@0h{!Y2Pyw^#mawWONh1~`^32aoXzxTRj){y$-E1_99!Td
z5Rw}tLq^r}NCOecXn3B{Fjne$ilK<~X)PzC$(Sl!DC-%We$ONw%-0>=z~XalC|DH^
z*raEbsKG%mIG*Ur6?1oF3B_{C>MJ(aqt<RQj0w@cSAz*3>mpL%Zll)Jx^QizWgz9X
z?Z#2dav?%FM}VZ*jgsVOV;Vl(e!;>)Zj3y~CB^lHTr-`>2Xx`cc!PJs8M2ddSILxT
zLhc4dW$yZ*8){LkB+{N&!Kgc@EI?h0BK<{`VED^?SCUA3-gvtWYt#b&jgY(1!|5D-
zBjCCV*1|9_Zq{vpn2@_cnVGx3mKPu^iL?=`oX|j-1d%Q!SiZ87#M&zQ1^QaXU8qU#
z9Bs`HH1#4$FD*;;RM%GX!i1KH%qp1<`GC=>>Z&#yb!_Lscnf9|W2i1ID%XK&-wSD`
zd}EO)@<y*d1felOL30;X`p}O^EgKG_P#6d+Czp#`q36s|MwCS=Xq`hZ1Es7VQA-`V
zb?tS?jv8gwQw<U{Qp;jTEqDmx+VPN0wJ6)t73*YC;e)NiEV(m2IGwBytm+rVovCEq
zXWF0;oV7%uz_cLj8&CwF)y#$hI{Isa5+5~L8Q4%U*ROe48JmDlTrJn17Yd4!=epG2
zf{CS0MmN_-7j3PcDlV~Vu74)-FYH^WnD1BIx@R)Yz+_#=Eq&<HxA!O^_(~5n#)sDE
z9u)tq<p>oLhJD@vPq9#7bglmj@e#<|A)x%SLhirj<@j#|q>B+L7MPC0yeCE{Of}?_
zCe32P#IPHmrAl+4I)WAcaIURD!MY6{GAMEv3CRsjPKIorw0Tl;)jK{irWzKm%$Q22
zTFue%Owz#+TY1C6=~!IOVXT6-Sy@;gZ>@>_U`+DGzjA*#SA`cz9(&a5c%(sFtzr$&
z3pDHoG~X;bMJrO6If3tWJkr2;PC>#_2ey(dogxA0q9Ww$Mgr1Bk4NdQsDzd}A)hoc
zcbBUf@<~&NQ7H!rt%7dYq^BO^-YjF1j@{dx@huXNE}}ONM=11G+T7-dbB$JSv$`s)
z<+!AUW?&H}5o$v`28>BMS|RE)Fs9du`ZQX{^=15PFjxxM-miG*tJO4@wDf#TOS4%)
z3HjbX3I=_ml$_~%crPVawi~iI3y~3JQLIWKkW=t(<FA7bng1&2cVy0jRjZ=4%8bbS
z>h*DK(P`lbId*0>yld<}36^oR3(fUfz=+1C&S%B<nmXutYcbkIEcY&#R@Z4^CP}@-
zYwNCDfrCKk-Smx*7C7({uT68c1?o)X_039Nfs;_`T~Sk}a+}(ZV1-ksS1k7~(O@2h
zvnDdBW%+^T-0^l}2&2ZJ$<+wZXHxG<Z5GS;uDM=o+2|PUNz3S2zSpEgLr);1Y588W
zZ+wJS9&^_7bckbl>@qTD<c^dm)~8yL36=&?qFA3oT%fN+q>ofx1ao|{DbL>zoslwE
z%yZ+9wdo^QOp4N>IONtm&ucWWtBe<mNI<&yDv~?wWT+2T!lNl732EcYSfCwCNL%f9
zBYpnGVNBAYe%P^tCx=@f7}M%393PE0Q;?Qplb&7`&iEFvwSTLrsMqT``O(veW!^=h
zx)T^(q_boBeiFRq0;8*zmyLEUnDm2wEHrLdzC%fs2>dRuHV4CFZ5Y$I?5+-u^K)yU
zp;<=0cyv`G^j&d`6bl->IKi>v7-@X02E_hk4ZO_9w(J_LqC@{3*Z8so)&Z5(ycvO0
z%M5k+E>y3y+@=Q-``Ga|S&ZbB9m<PHY<x%Ahy+0%X2s-=76D8s=Qa4@A(6o9_TxCg
zdbQ9+MojLg^w4}@lt}^G=;1TnhYbhSX3@6rP-<LpW)>tPC0E)WK`)hIpODrGzAZ%>
z-gTiPC-AytZ@;e6tU8g;8x53E2;vrm3<>AFiL{DhZxTP!*2N_<m1Mp%xsM60+=gRj
z9RbVFXsTJ4jHO)Qh^vr$)qArZ`jiUTq=zzXv~u>)<Os<P;#ce@M;PL`Z8APo9>)no
z9LM%<JdP8FIP>!N-I$PjX^7<cc~D+Tu5=eOo5&&GYaYLBd66;`kv{DL%c^p&@>QKF
zmDQuqo_9~0y08yanyYDZSJp-<UrsKOHC)JA@K{3H$dm%@SmHwkf6#=}=OL;_+NVhD
z-NDC{yGZQa!NbMq1Y<%B<|lzkuLBI^-X**YB))`bm$BTt1m^NubsgEJu@rQ8PUc;}
zFUQTnaWd}$7;Yz5?1Ia@3t$ww;4<%GvD!MHHAvJk38mf@&<m4V3%o9H^L3qobm60h
zBr7u2aIs{~5t18tQ0yj0n8ERt-zRT?<rOk`UgX^dm|lV0>VT>aGm9AKdM!!<we2^f
z=_NVZyl3vy#q1vr^`jlaURw#Wy7^v{vYy}c9z<46ks$P$bn|`I60dD{0A>=z39`Cb
zpRF<v^L(7%nekOIy^r*|fcFD+tPI=h(ZNWWEs$E5iM&3h^ja9bXW%WLG|db{_LC3^
zn65?)dPhdtNk|(8n6zw?^*It4-DRVLMcSc=^ihd)H?n1V#fusZepF(<T*aZ*M5&nB
zeJJYE++oadQZFssHr8FFLhf}G;kX-qWR54vlPc8Xj#>;ix^EZt7rQeuaslmu$STOO
zjJ&tnTtz3@z*a|{H4@|qlyl@+moQp7b-X${#0zQY)@!Jkz*LUwVl0n$l3;d#CiVI3
zykn8PuS!R3$10p_LWi*_S!T0hivU@s(dom1Oj#wzjkGYr84Lmj53R0|k&9>s$j$oD
zL;v`owmf6O3i;#%ZEeld4EdzFg3f&i_841^^@jnu9kXWGvZgYYvl}EwsAARX&)0`>
zCKr&dt2au*LJ$c^7iq}dN5+YHQsYY3+!afrdH@%JgtWVlUplvU@k%se!E|%Lp2SU#
zbL>ryhjj^Y)o+Bj<9E0h>k%R9Go-lY4k_k*ONv$Bm*R<UOVN9q6w~jLq6_cm-7Lkx
z?NZzfy7Xt@gATt_istvEcmT9DXun@bapwb4oCI3*BPo7;j}-g-T#7UByCZ1fPo&uV
zJ}It+toMgf{OZTR1aI1RrFahhEdu@>_e*g$-cQ2s1Gh-g1~u;m)=%+{-z)LEHGXe|
z`_tk6(YvMC2EXUv_gP>a_X8;|#_wYYZ_hWSxDK=p{P;NH^*m_!R)i6>58`(|=n-ID
zfq2}5I9`IdJ`K7ReoTXZb%gcDptHY&@PlqddOU{Tzg#B8w&zH3;+Lhk@FK+fL-21;
z__4DTN9=;QfVb5XLfrSb5bN=Kz#oJ-?2kfSFnH%<LR^aXEAhSt@<kt(;_AJnc;-pm
zr@0Bz<L}TLkz(WPq}b_tgj*urpB19rCB+GsNO8djrML~hd*Syf(1W=9WCnEB|5b>s
zK)(Vlr2Ee3?!zObxcrMq8~mPdg%q3Kg#6n`iqUJ4mY~BR`^?)BX3$n=NpatqNOy$q
z=`Ey~_fDkS*-|_V{#yKYze9>&;`bW-?vAv5?CVn8@HHv=zJYM!`DXl{jo;_@m*S#b
zk@txA(SH(Rcz5LAX-FsVuS42Agy-iV-xajzbSbU}-F^!4eM|TU{TmVX!F9MI5A^Z3
z>Fe^YnTg*&3$bq<c?&x7O9%((;J*lQIcW1skw3cub2-8}4e@xD6i*(9@`3jY@O$(!
zDV86PFoLF?AjP?$$3fSFzddyBIvim-2>iE7vBO%p`MnT(A`PC#@2roaynF;ay!VX5
z4QQuXQk)6e6?wKR{D1yLq$%?2bmZU3Z-$?^bMYzA*`WGAgg60lyY&-NtQkRGAWwHV
zS&F3>OL4&ENZU(MZV~pSkROL|)ISd1>yXy?eFVRke+cP@IGz7D<O^t5&<&d+zMFyf
z73j86{=R@RiRXv$eC2d0#9ZX#jffYhXAjg>@b20R`SWU|3uqR~+vou(zu;|+_Xm+j
z=OPTZ%!9vILVrHuaFi5-{}N)}^FkcA0D9f<8~n?!mg3$;sLP*0J^mExG2A?I72HFA
z24qj;w+{SEKMS0NQtXM}+u{E1E071DLq1=Fu)GQN8s+MIr1jIF(GTDmxP$O_)(uiz
z1*(4<c?8VI_dz)Y&cpC?J#emqpPS?N2H-yjH)lZSLFg{s2x<HlDYo21ijQuL_Ua|5
zS6kzETa>@8kRID0+@MGBeCdl&cObv>#i(1LInX=erHDIt*Wq_#=#0G<n4q5hkWR?|
z%l1Y6KNM~cL!3aHL$)7ymwo}|bS2uzYk`aW>_gm+MmTPQyWLUdPe=Hd&qjXUgm{9k
zMtQmr<>t4uP>xa8*P#Ah^-t8>OHr@)KwGpY@?mq7ug?SHG~~l-=+zMZ3d;N{l>Z^r
z<6)!=s13Rw{ClcMf9O30x^E5A5p)gQJb4oGX%KE9zY)K?!|hh!tq0Y&L)s&4&f5tX
zpkJcgHeZQ)4Z3f8#0S4UFGHU1i1_3Egq@L2JD}{p0&&Cd9gyt~*`;`&{&JKB&|~nk
z_dvws4IupPcO>%bov8E3-<vK*oKWru&p^F-EA&=?2YPHXq#5G-EO^tuigX6u4V~*g
zj54<m!hpMw&i}U%XF>1iO_AT{pdO)qA2}0gwH|qY4C>z!=pBpn#q%ui9$Jd;1=4C2
zey>@KFhhPGXj(7)?Lk@{4fptcXaH?cAL<K!w+HW8&_4YL7kD?3f8bw&_xs<7@(H)k
zfL7u8tjpjh=!DlGo}jNIj87wsC!vix;S{7h;`zXJkT(&I7V=;W@jMmJqsV`}Z{9%p
z1#g=+Fz|di_&Y*=7U+Jw-wk(<d>-j?4g8sba(Dp3eK7JF_3=@(Co@o9E<6O`0j-*Y
z_5^MH*`OJ31I{NAmm`3I-x;q*UgP&l{ND9tDR%w}@^%E_yB^`c2<i4QqzU}E=R(w<
z_oDs5??dM!&EAi^y8z$T_W|QQDBpNK0=(Oxb0ORufw1g^-@9icjPF8zyc^*JU3f0i
z5Y&Ah+DXu^aC<)J7U=8={^{^<YtXN|P=-PKz6kzoi#m(A?zu7i0rvH1Z%%jvzOM%%
zpFqO`-#WCzk0ReMLwR}#@$E)EoPHwe0%+Bl$osFM&f@oWd?%-`M7{nT-d~6GUX1u5
zUrxpSNY}`p$=7y$U;IFvE8ZgBBvy(yi;6g2oG4Bb$B7ffW#S{^v*M>>r5qMNm)D9f
ziyOqaXp6OCow!OofxChKC<bLyROBl03-KS~LGh4yLY^XKcl}8Y$-l~{<a)7%{G;4b
z?$vcz*B9jda^J3NL`$43rghcDtGZ^%GsT`=hscBF&qY;yQT|ChDXx`Q%1_JH@-A_Y
zc(?d};xl5mt~bb|<m==Sa-N(k=gULo0y#$>DG!&gm)&xq+`a2C`3`xO{E&RFe7Ag$
zyhy%JzF&S&ULrpr&zBd<i{(4z+44O3E_trJKrR!@#V5oS;=|&T;!E;t;wJG2aiiE+
z{ziOMY$Tr*RovjVN_NSAiRW>d@N?qdVn}>mo*?&?Kb7~%H_9H_D;LSd@@RRSJRX-8
zUn+$BgS=E;CjTj(7W;I)rt1!|v3N{gFTWsuBJLF@$v4TDcD<zQaoN@Nr2Mn|i#(v~
zz^*0oSh-Zr>G~h>d+|H*UGWe3Px*KGR&koxyX(X9BXa+)zl)d4{p7ZC6S<juvD`||
zkgt(1lCPAnkO#=k<@Rz1`D*zRd1%+o@(%fJ`91knd4s%7-YUN-Z;@Y--;iIEH_E%@
zx8yzYNAkz=hw^s$eR-$6TYgvmKz>JlQ+!9<F1{hYDz1>9lJ|+5#n;6x;#=ZY@ojON
zST0{Br*|FH^)vB+I7hCKv*p2E{qjutc6pkt%ceYCo*~~RM`c5vD#zqoWlMftj>wZ`
zTaL?ha;-c?u9yEMJ|TZ0ACR9GUliAg)#5+J^YTCCZ{&Z;U&}}2|C7Ixzm>m}zm)$W
z|3^L~AC~_u|04b-ek6V@?i9Pp-R0-R)#4`kWm%IS6PJrmi7Uk?<@dyA<mK{n@^A9<
zVpi9Tu6K!dinGNzVwyZ(Tp%tK?-B16?-S>VkIG(go!qZ$+pbr3?cDXUt{u9z>)O8S
z<y|{=y`pQUt}VM>-L+}g#$7M&+Pv#UUAuPe(zQj`CS4nKZPm3|m+0yfE5si1WAaz%
zWsZqaajF;*4bd+i6JHU(7Q=E-TrKaFKat0ZW8@FT-Qq2Bo35?9WY;6&QSn>xOYz@g
znS4e*E1#BUh@ItY<xAzJa!+}X+(*uo+sNs1C%L0MQ0_0cle@@6x?U!?mV0!qk&nvX
z%U$JPMV}myZx?5YcZk!)8gajPP<}~VFKXgp@o)J?@toXC?k)c%|16#o&xn7B&&sRh
z+r*pYiE?Jw2gC=(hr~tVVsVLhzt~F55r>My#OuVq;x%GZakzNBI6}NZ94Y3C&BYdC
ziRcl>h^6AiVr#L7*jvmKuNIq#?ZviYJMmI6U%W_cCc4FzVu4sF4i-m=MPiniA*PAx
zVrTJ6@d~k{c$Jtb4idYGUBvF<K=E3!zt~S4Aa)gt#nECL@e;A8*hlOob`pn(+2ZA5
z2k|nrSzZ6v;{0EW^M5VQ|Ft;(OD)bub|z98%xbG%nn#K)mkdu&MyeOO90A)U=v;93
zZqMa(4D5(f&+4`x_{rYF8+S67-e7WzDdUZ<w()+^IxTy}Y+LviYMZNJn2KJ%)eJh|
zUrjA4@9E7=ALbmbo;|XboMpBOtA!<Rp%WQqsaF^Zj92v$=PDq|LN#%yx5CN2^(4A3
zg>1&PkLLOlZaa>H7I5H+IUC-1Nep%*q*1Y_05PSUgC_MJ=G<+`@JHvko@h0DdzE*}
zmA-Swzq5mH^O$6Rp`)c-du@NB><frvACwUf=D9r@F1j)I`!HTS>1*bVV`CZSLLHBt
ziLYJR@rs5MTb5`5a~q4-*0a4Vw3@*!gJNYD<NQUFU|yKqVs#>F3G!?Y{TfE*=zg=X
z<+9$q`>f7e`7*pP@ePwFnqjeY$Xrm9T#G6=Y$yNNp6s#CzB;Qk(pwQI)$BT^${PD(
zHk!n{82tVbt?f3V7=6D{Uipky*2>b3dEUs<yJ=(Tk=Iyyr@JJ`m;41pQ!C9uI7Say
z%T8l4%mvxQe~bhTvp;&r^UUU1l!7)4v1}NiYy>O^5jOB?9o?N{<Ne`>8hLKsc{60M
zYEMftnmOn5OysyB;~JloJe=(LHQB{hL`j>)1_27Tmf~B;x6y_h*u2ngD^_6&Jn-8Y
zLbp-L429GhMBCy+1)ZIG8-5m(q_j1>P@>u~eSAQ!j@cDh?8(Hp4d(l=wiLclD1t2`
znfSYXkj0^NDuo*qi}Th<P^aDb3t<onwha>-j{Lrz(3RU96tcCMoX#+0p1ruz-bSes
zk@4BZj+EEb71AsAK2QPMY^Jc@D1CKPl0zg`sO7d?8CSWNb^twd^@lxv+qw%bByr5l
zxwuwBJrta(V?$;dA|1yd%tm64$W^Pf){nxomEnetb}@QI_jiWk*iF$X!6qPuM<-if
z^+C-%yv8NZJj2R6C)t19kou|_!IzA7vKWA(vw*S(D#Cryb_wGbKPM_=!7@!BmF3wy
z{PtWVeF~D-Q73}=4>Cf4goxhV;Yr4s+QAm+(Ecl5=uJbHy#>s;hc+uDTw7bUSnF`F
z&Ndcv^}Id8@LZqa9i7MR_~u43<9UasE7nzXJ&R>y^YAJR?QG6_yLhW$H$<9uHj+^W
zriL9~0%F<x@F-qh`?w&;7@MG#b)gX-^4W{Dk)i|f2KvfwBH(K?T^m2`g)iy$J?pO8
zjI7O70E}ggL(uQYsX_0jE!XsMV$Bd|^Kx$a0gsJCc(R#ZUtpt+a2a+c!3^KoqMKm*
zD}4;Zbi8A44Ljql;m6+UL5x2iTfg&YX{#*@%C_Ef7TXk$s71C-Ifvcr4rYA*GMpNL
zKk3z=Xudm{+G&~^&c3_W4_5bWLDHTqbf<PZUsT6S-{0wY_)?oYZ-O9Da~k^R6HoiE
z4jwj#M+Z*h<DVTAKOxpJ<D=8Pz#hgOc+M-f4hJ7?ILH2IamO--OXS-kwMW|bUG=*e
z)|oAxUeD~<bjI{v0>6XpoL=HHO8f0fnt83--LKvWDr&U5798!pZl#S=$INc;b=@zm
zYWGM7J<U+0w@>>(kP~d@_KbK3sM=P-<a*<zo43-wztC<<?|o`>r`^8mwJ{jV-Mk9E
z+8oO?&GyrflgZlbzSW$~#NjP!@#SP1A-<=LtnK~1c1B0_W=z`YlOt@#hIdIBb#}E|
zUpHMHXZZ28ZJs@osl@lMS*;-q$VYAe#U2}}g#N5@dUaV!58gIXi_jmVi?5VXcZS2S
zlFB`OsrYnBD+x|RRsdO>$NV!lZCUq~+i`MF(z4QMz-rm5#q<a3QBbT1mEWD96!Y!y
zl<CgH?pu{ToR>X448F2c_9B1XSdIBXylAKyN%c4}#*U9giw5M@4|iSy4~lEetQIkX
zVWAVSMvd_(F?8AIOqCiLcILP`*UDM9Y<NlYFYFd9dX3MAp@?L*6%)g3Dz<WSOR9dg
zmvjbi#*EzZ{Ny4n3-Hepy_=4S$_DIavcK`x*kGrw9}B?+N`~(Bxvj1mIB&zT>oUS!
z(Y{*ax?uY;HMMN>v{42(KMWAK-OxrCy(6_%*}Sq{#P${@EirHCYSPi?#KYu-?X&Ag
zeZgPi-ozKa1VQ<0@0sN<D7JR-D0|ijzQFp6rb{JWZJE;yDvExo#J#bCp}wD9#lLc$
z@K_<%BjT4&QlP^Vec~gFf}?lThng2UH9GPkaf?e?8+UzIIc81u?c@ZUbH6!;<yDnk
z(beYcIt%3zn_q%k!-Ffp-F@D)5yoB4tzXO?+WLiE3yxdAwa5HaD$QA4!-b&DxnpBS
zL%*xGU1wOlZ9$K*)H;}PLgTgkeLfmOkLN1u3U@?>YI?s`4$p?i`sco-ek57A+sS(u
z&hl%gXeiufKd(t27Qgi+;0IeB0}0=D83<HuWhn;Vb2cg#Z4=}hnRJ*bliAXrW};Ea
zi3qp_0fr<$z!1a%1}i7P@xECdXWGo^CM`{*TWBBcJSLm3L~9yfo05Md8u?7U9QLs)
zIO7b<QMgG+uU_C8i^y(a6|c)H^>JMd@xlf4%okh{VHaRN@d|#B$mTulyb_boT++Lr
zYs8!XQqc>Y_sVANz;Y9D<~cYiMWqF%%5c67l><FF3dLaV4DV*iNrgmLA7c8+BfcaB
zNgtobaWfT4U@KiXeGrHgipp%qX45s`nGbw0G2z)6pY!HBym}OLqs*?~q?TMAQt#Rc
zI5y3D90bG81i|owxe@YZO_OS0v(~`j9WL?p;5z@7q@vqo+`I*R)$Zfj2`6Be2$}5H
z4#MdOr|zaB9EV@G;+WZM!ip}8iDxgI7xC<c7uc(8g7ES=4X2ua^B39%bGKCBgWj?g
zvgjI(faB*g97wQ*QYnXLluz_f66;i_zlLSkU@CDtxF$if;yVlmdiwQJuJQtJ_$Js#
z1X@3;qGo22j5&<&Hi!2dXO8sI#xlMK!`@kEN^`lVun-D*<66U11{JJjqs?mAbtpR6
zK_?RL+iT5$@4wqx7#;br%w1GU3F+Qg2UyJ{x&gR)bKNy+*Zo9-d*e;LYWh`5tJtCE
zt*<*^R}?s&y8?m2vcha`RL^J(s;=X<Lcf_VnRzpPGWF*AZsv_^_AKC<ru$YI)J=G0
zs9_#jINpqqaj~C&m|<9rRraB#9xz%!zUs$nO^>XHuphfsTdf^iZ}F?0uCj=$>J76<
zwhD70%8S)7HW*#29&1CeO!7}Fhv<$=_THc)`6-x6ab?S|GuK+fwbjPtsn%-YW-a5c
z-KZ!R^}2g913<?Z@zEKnjtt=ja)O{oY(qDXv<=rjw@if^YpheOQ?c%+s~FmHmohD+
z(x4749M~AzbjGhvnbznS&f0IAms5>LEj$<&{IRO-<`536v;joyht}w&IeIj~#^dm~
zjsYI)L&i2wJ)7vzvq?vKT<824jl3Uao^q^aRc8`h<DW_c{I+qKG^Lh$)P+Jnvgh&O
z*}aU9X<uyN!>^8g8yz2vQI)^#24;cHGqwzye8qiu{?R?PSIq%--Dcq0AeGm)e44Hs
zYPmF0&nAuNSzZ6IcuDWEeFH1I7xd5XTiiQvY~M=6aRzqzE?w4xiMm+Iw*SI}L$wt-
z=cE>#@dg*#y3(Isfq4LDV;OO6w<Tf+C(zGzR+mgm@yPZvhet+}F`K?6IlMeonrzAF
z(g!GZ4_=;oYo!^h>(2)Fh1O_QE6r4ir`l@I3#~Uje}2}6lMD0jg28EjP2JgMy2Q*q
zgO9C<vVg0(QT-j|o}ismO!F~iqQKfS<QtpMkVzjPju+W~2Fl(FG{ZIn!VKSjev1TV
zN{5NjDAL_tsS9mjBbsrLe^ZX`Tg1xV4m9JB7tus*Ld+~ki@o<s`Mz0e(%1)#4~<P}
zMzSV#E<tQxnsLpoOk<(#Wfkz%#C6eeXSxRyyENU@d^Wd1&DV(1ENhSIods5@j2+mf
z2K<6HRokg@%~Q$dI<A3c{ZZMha%Fr)8mGmD&M51N5`2*|4)U_wILc;<5oFRfuxn)y
zMJKIrmCS{bUYNCX<ucXdGRG4!ezO_eTBW{g+B(B4+37Hr9G1n8{n2w1%7=jMj9|HY
z#|%`_b2Kv`3?0l&+d~Gh+0P_#gRdkpvh5mi$$HjfL^ia6Ahe_nM4>%xAj+|-4dnRr
zdfVDqf;4Divd3E37Kk(DD#zM3cFJvTV_9Gk8whC~$C|qa0&ayH2x42@K!`3vLEIrJ
z4h6H$4ZI6&bR$tcKj?wEtvAW#MWR5(TJ18NKvOS~7D;pKS7_ErZ7EweEb>Vgp!ia)
z&@0jjuY1astM5g8F<3^E+xI2{6<YbG6eoB^$izZtSC9&G5NXk`;)u57g%ZUnzQA25
z#@@FO)5w9CytGLz{1&BDun1hFq!w?Bgk~X^iE}2c^QC4EJ`{Q*$~fTg*9++@5*)@(
zrBR8sMQ+y9JQvQQI8e{n6=$T4@!+U8V7vMFP&Y6si9{KD<4|tr?nAC_y9;A#+IBvL
zo9CXJy~XnIqDe%p$R;^(7~b0$i8A)dp<LUk8Y|^IQFrfRUJDQ;X0e>-AX{{di@<t0
za2XHK5OLL<nl+I?H)GEnN{erhS4%2^GI4Sbgy(k7feRzR)MNg|z}dK4%b}e6U#%$a
zlp*$RB_}{v>+LxnF^lLxpDg5jt>xV@KsceXNRC~yY|gsW-Dqnm9Xk!|rE`V$W~ErD
z>xQ+R&X?weH^+`TaLnZBB4RX=JqXY*i)AwrmV?@|I`p*Q+;MAV9VoMzh3GI}<ONx8
z1lnNCjX0UbxzGU@=M*TLVcZfs^1!a~>4m&pA+8R_R+iv2d<wl9L7T<C9CyKJ%jp;U
z6j*CV{-sVsM2RNL?ZAbtsnpd#eJd7bGu=APk+AW<S|<%5_4wQ^GUf^WQe&RfEjWfk
zHE+;2cS#rtGdRtMGr|HmBh80%7T`f3m>qb?&pE@B*@TC>B*?o*&Dw_t+J&D>JxJoV
z;(<FO_7>wgQnmh?$JZ^8HcBp*XY*czYD@AQcWMC89p;~Qf<C#iD$jAI7ISm>!L`Du
zSlTPTQk&z5uh-^??fPtvh*=KgMZqoN<~YqZc{;4w5xk3rUct@rD|4Wxwm6GNXPRp!
zYK$SX_f1QUF}kx<kL3uAQHguJ+I?I=G&gE646|x2*kcg-5QjG#8yC)SouzgxvNr9p
zr_r!MJ0m6)!JH9QsGzOf^Zd-Yx=dTV$IAHL1xJ}`0A~FjJ76~O`J&XlLWMc#uda$c
zrKyVKwNLriM<uRlDkx;uW<Cyp9=zo^GWPRazeQH`F@sjLaAOKBZqe0>bL;Ke^6F5?
zq@j^FO$zi?4?EOWEZ*CLw`4n}jMu}XiXkC;`@qM(ZnC-$SY&%2Goo-qWoAYP*7&ir
zS+ToZ%Y5d_+vj^`wERS`-d3Nhaf|%|zO~zD;?%Xu{b6`ve8P`@&b8#{KJzn(S=wZ+
z`n@At&*P@i@iP|wS+}Z4#2rfOf%=|#YyYfkG@#DrKVKS7Vp)&jPTBwGy2(u4Fjsml
zE}D~J^0b8Fqi2X%3P-L}^87p1bEo{QuWr9UHmcEg#JV6h=AGhij}vD;CrqOE5#&IP
zkLXn8m7lp&dkV77qOXi~keQaES!QFn={+H4yw@O<I>Y^tow>}6d@{QaxTf8Mki*L?
z+0r6-yktbYYzKw7*##lk(Xt|gH@8oLqnmrh0#-ct0k<gq(XN5O1;ozyFc3sTGx%ip
zG6ZUd$yZ6B8t-ceoY2RT?QV!=snEh{erWA}hrlzn>w$CZ-iI6>257Vc!b|k#hYW*{
zq?c4$r?dtdtSd`f1_CGRYa=ix)csQ~sErm0)~i{TKv%Oei(BE22_JIw0d)t7*FP|A
zjpA&obTOdgWeT?6K*7T2nOS|mx55W6O0I#1N=ELXyE&OT%D*)_dw;X)DrT#cnlXtZ
z8kvfOdoetAv?C)HEbL`rH(NDmcSfwYq{@p%AKR(HG+_&@%>H5)Ec#*=%=$u>vI2}*
z8HdUqFw}xkAZPHM9h~otG_%}HXOHjo;OaCnq<4H|xJ8$~<lOQ?M+K=pAh9P_z{|Lw
zu6lv{d-5GAo9{dvZD|@y?2>noq!eo_$fBSP&RKQP!G?_O9Ergo4Yp(C$k7@XUit;+
zs@=<k6#}<GBla`gR}xF@?vj{KdkZoqMLLRT)C;2zSu%o<ry>Y(egq*Xj9|PEg-0da
zjlv}@eAS{oDS?>nO!0i$b%JkUanFVAQVCs!PFM!Q^hE7gVd{S0O2E{+R{~aKJ7#vZ
zSo3DC>LY9Svjozd3`LR`4IoLBT_q!KYy<{g+{Ymn+w&3xBU+|`Dsh7SFM+<_1rxIE
zIWhs)7?!Az^GXdA_KnM$pFrNnXV{~eKP9&F=XsN@fr3I0vcG5w9nMS#G-rC(3kwj9
z)12vD?=&~6#v$#d2~v-~(SA=&Ml{|;#TZ*tLE>cHL0AyX;#p_}GumqtVCpWK+i{a8
z%bWd$ui9la`|J6+3!F~OcH-nf=1t@}r_n^N<J-MCIsT~~I(ht2y2Q6J6s*HL5A)}M
zy_ov4<|vXJzmb8JI{L0*F6u<f&JR0Zb0&bD%<tvN!7ILmiT3s6IEd$l9ZaythdJ~+
zeL|)g3T1C6&fGN|y333X=!RX)3{=}gYj}XeeL&0)Oe4D5#2olNL0;GF3<?;CFn48l
z2{E1KX?a6hzhlU&lAX5O%pbC|BWP{Z%~q9JFtjWlMQd($9Ig4;ZhyG9h=pft7&gf{
z`iieJBnKrcjr)L^Jx3f9n-}KQ1LN#?|B=^LD^<}1qvAz_7Pkk9rJdttKinkJ&0wG(
z?oslg$nI3)vdm>{Zr2jia(kCDykG$1_c29!Jqv;%jM2-w4F)l^+sON)b~&+E+JnPh
zY12vg$_HDOHg-R8aihjw%}yvUAENzG?A7jy;w&CEwL{7a)jlcqs&-4UcbKFy-&gB>
zMP9D=Pet!$7gfw{&|a#T*^2G^?7k|dXm(dI4sE;fuX;lcdk?0w)NUOwG<&ZYL+`)}
z8Nqd+*qEhuV+AU$pb?{;SzfI7XZd%#OUrXwdao8^#3>t=8NBgA;dU1>4`?S>_!_tX
zV(5k6+r?bz9bO>=`@8}M?e+>7YCluN2@(*QtoJtA1s`Hy7g*rN?FEatF}&Ir7U<CK
zuz&&YLk8^>3pB9JE5pceuy-tSguP!Oi}sOuov>o7J!MR2c9wCD-e2Z9dY2h{kBjzr
zRGP=xzBBL4?>^(axCGdpXkN$eM+-TqwQr{_XnF(Mz@lYKk6GD0cR(G}GUI^Zdi}s@
z`tP9G(`U>%<k0EU=NN|hnQne(&6#1IPTF$#+>OMvT`oC(Q(S4aIj#|;zXOJ=gX63B
z?ALDsH=_vL9~1r^DYlt+Kn)+$%APZ<WP3c^4SLuxHp1_bV)|`H?_k5my(vQA>M5s3
z{3FG*+i<-kZbsRHUd84VQ;O8yV!LfN1LI!B7PydQOYzOhwo7V5%|;9TrD^;0y=Gd^
z;-$wJdGNCp-Y67A0{0Hl(>C~j6ZK>vfIie18AC9?S&{PJxc$lNLs3w;x`d7GkwU1D
zuCW{yFO6b!W|0mY;f@-^ne92m--Z4&f8@KsEi(SE3%Ab*6z9wy!xz|rI5nkX#;ikF
z>F83W<0gbhhcmx)4Ew;VCssOUS`n3wEA5N<rP4wEPHnDFBsVDs3b%4@X-mdLYt#&L
zN9wd`zD7L+gjAX(YSe6#JgP=gjpBbtiX&}}nq~P^bMLb+=9l!QHf<-8t~&vJYRaK2
zhDcK(s8$Z2OD)waRs1$qJA=cWwl>#9$N4wwP*db1EGi|q7tUhQ91`}A)(0#40ut3g
zO#;m}zY*FFK<n~odPlH}lzo_JnVdB{KTfXz%0?E2Z8En0Pa-}C=Oa;B*q9^Pc_Ig8
zA0`~9gYuDH2Bb|mlD-#VviUPJALZph+0>%A{na)OlSsD&&Sn9Qxi4q3@k!**<}r@B
zJV!&>xJ@{J5;58$K*B-;)v65;qb*~caxt2fUlw*mS$I)^L^CBmN;Ym&k#buFI5cZx
zeV;^`OO%Bd$2jFu&UPcBR*u^5tpg;x_kY90>Lme|o-pzWXfdZEpSH0`v|U0?;aJ}%
zkxz-5^iqzariDu2Bywq6gA*R5FnO%m1>e1CC=uHkEPP&v#<5YN>ZiaRpnaHdUM0%<
z_70Do_+SBC@t81DqLp}=Me>J8Y!I9sI1cqx)B!(}YsY8Xp<+~bcSTOUoa3+!DK636
z!&Ic+jt<Qpl;OO04->^n#p)Fq9Anfn`MgThteq@caK?v&>b(D&D1WbHSpKfK$(4&)
z1vQv@t~)avwVdptV;?4(PpRJcE)j~I>9!b?XlWCz#I7FApP1sH?8AiPl_(Lf3XzP(
zghiM{tlZpBIC|O{IW;ZBvg;iV%sxysUb72I-|oQLEf>p}v{|6eYjVLj;8j4}-6Cd=
zpK)+gTEF&)@btBZldNBfvbbl6;|>CyBvN8@`04;jFU3th?NY7oUICInvBwn%_b?S@
zaqj{&GT3%N?qQ;c2N!%R_d!|QrvTBPCgkAU!-R1QYTbUoea#EvmW%VOMC-C|h;0u>
zas|jfOf;UUufb~rEPwcp#pt}<O!ZOs3(?lF^P3!?eVAzMW*5}>J&}w1hj{UkP!_7=
zl9P(#0U;(oWopuRshPY313bGh@1Qzvsb?3I%)OAF)AO*@ZVhu7mV>nq6V1h01-VEw
zU<ZY$?u5`uw%ZA$83B^9F0oPRyp$C5R$h%b&CEgKr+_)o&hxLJHKmcqtQ^#G?bz%@
zKVr5)%N|xX`Pik($-x%y_@#4u7q7(mPT2bBpzOnhOJqUr(d^417AZJW%_4N(W~N%`
zIUZ|@X3u99%++s)m>ufTDlG9jE`_OlI?Q35>d%vNiDw@soKJ~<*y|h;7SbE1sUzyF
z|C7k2M9Xn_h!l1CEarscRFFfon)3P(NgIOWyakKVdAw5n%OgUxoDPD;op9U=zArSc
zdP9ho`IvQ#m8io<hFEc9KcyvpuET2MJTYTMW+UTrZlWoe`a;jMNJU2-P9C*j=3*N1
zZN5XzoSry&yu_EK+u&snWOQ(y=U<|%EO2=IkjKemrCQ*H7E2u*IeCm(1*02UcR9*p
zkoCXbRqTN5!$eDCYDR03!Lb{7Tv2xq6GlnQ<}Z#=*tBiur6AG9AMJ6lE#2(fvdPtX
zj1u{Cj74&XcslRh!-P{V@qO&c;FL<cM2wbLB$#2EttlHM<(4v>vU6mq_5WiTPG(WQ
z^HNvPXD5rFUWUXM<!$m!MLBpQ!^li#b{?C8mYZx0`WO<O5Nl)6d9NikgVJwMqElxb
zn0=UN+NF9(10E|p$ku|mI8FuHGLIIXjXQa?g7#q#<koVJmUG;$gY3M7B>J~2JSI#W
z{4utZM@!AF9~YoS#{xS@`!L}gEcjaOhBQ4s53zLbGc_Z1LV!s7d@F<Xs=bkTa<eF@
zasQhFJboauO~=k_dn$e>26(~o#tzUvOf&~mt;?G|9vyjM&uz949oP31)>}LlKMH!%
zSgCYe>9OL|r6-M-n%OwX<H00nN@{vyj8X~Erl^!8#tefDZ(U`qd<`$?Bhb9wkjDzo
z>YjAIrM`H>9E~=um97D)5pC6?1&4sM6U8ST$Ab&LeETCsSG@qLJ*3=$cl-^Uo%lwq
z&PCVfN0mS<n0cX@{WZCW?9?d-Yab@sk{(=8C-((vEf<w$q}gFj7O>-Xb#_57V{agz
zoQteG9Qw$22itkh7L+UMQJqqVYwq6GpW}`(Q)RiH!c?m_?lQ2+q9^irqzIiKckdG0
zK1{fD6^uD)4R`bfkj(*R7P9m5RnYp=`c9(|+ufZ$*|aV|JrJm4Y1Apn>r|~ewFuuH
zpyblH^B&;AiJWd0;r0g?5==U$4=$J$Isj?jdO_^c<t)*jv~$tzwFxf5owp~68a<xE
z#H5Ql+|NPVhl$qcR1bA+F0wkH&%(G8Hc`Bk^~t(IJVaBU{5NUbR6N(`;;Li*CXZK8
zf5^t{v|K!Me&FO06Fr>MEh0Pn#~z2{06Q+VsWg3S4x+ySdy=Ezf?6^SX?h03^QZrp
zE32uQjJM_D(d0hPCUHq>A0}EmQe)FIV?=%Jh=p*VQ-SsN2&;6w3fffK+jW-5a+jmH
zNOfK&6Sd$S9w~F-%H$DK^VVmFi0TrT4TJPfhJ@3P)md<pM@o%H&k2$IIm-^tK1{SE
z&Mqh&)PsK)N6Xw_W05-V-=uQwT!v{+`<gsbDt7N?NZEUKHVo!@8BBKRiEBkVPtjC7
z-;=@Q7aUAJp0f&SJMDZtKft6rh9-}b>b1Pr;}qRn<bdtNglk4(7V3fwZrPQ-R1Ut6
zqh)SavhkWksY%4}LWZeNCo?xTIXL?;(ezANcD_G@MwdWYgwA7@`j%gm!BS`IIWYS$
z;dm9yHIbFi2QpYCdqoAKGqM1_ID-{je`TZPQgy=dE9m*rKAsPHOnu_w<WUl-cnL#c
zhd53eD>WbaA&-^0_saox-ZM(nv`ZOcaF>{avkw!_vBawMhdmnESh~_MwRP;W5XD}3
zW+UQICmf?h>G+7pGMA;5tYZb`eHzO9M>AMBIaXiHHu<;}j8@5d>0<$2$#^Bo;Ku_T
z)veSgJx)4Ssb1_SvS?*3vkH2#G{(8yqoJXXfI6<psjt{4InsCwcP7wbnJx*Z6zhrz
zt8Bbd^9i5wSafxeAL6sf*;r1rMyK$u%)v8v@L3p_s#AgY=>U)Q$jij9pm$7m5uXY0
z?8)PF)g1%uxU{5F^s@nCa9iRANztpac=iIu$>Swb^m7589^cqI9Ub75rs&l<h^5PD
zK`TaM@y~lacD3Yun$;+05nr&zLMuDh<YM~kEH_Nbeu1GzS7bUs`!LbkpPF&Fmf_i3
zH?9AZ=oJ<8&}nzq7a5Yf$<smEhpC9ubqvW~4r=}HJWi>W<x3uko$pZ2;%ZhwPlaqm
zulIO#hpCH{eVB0SCHlx;4lw+giIYgVg1#NCMBKnoGAF&;=(q?@G+wEBgs&7JmhL+g
zlzAGJ-pDbt2i8wMo{3)iSMxCS8TgaOOMHE9%Hp|uYaL+6U!Rmc+1Fx3a~Ez2w1RIB
z^`O5Vqm}yNq~i6BERwonYVz?))rXt2X!>5Mu=(md=Mri8O^#{q7A_IHf>w;qwzwsW
zgx)*dS6l)wQR}|t@$8_OtT5;fWCv&;Cfu4OYTd2*c;O(q^Smptq@w+Z-_9bIL`%&?
z-Nw+^@d4pvWykSLeapYY5Th%lZN|B*oQfR0J%i^j>~Mfn8pH2q5ZzrR4$wYKG={1D
zzjrV^cbT>Y=sYbGweNc#$?PR{H=|4*FH!sM%;0V4y!(CzaSHM-(Nf>V@a%OLE-5=M
zm8p57AH+x{Tb0y4#k(1nx<TDV$v#ZD+$ZK_f5=eSrR)}><C33R&$%bUGWWbs9;=|w
zPV=!p@>urj_{rlW((lI%2bT&rx=Y%dq^xOv;<2>lKEL38(y>c@+3w9C`dgD6;FQ*|
zpT>ylCZ@?_rFxb3Ww7*2RAfZgar{zw__G4cu(M#}I~6tj{ups#y|S83Uv`X@=y&}*
zgGFZZ-L4gx#JD{1C4C@9n~Kv#3uf8qgq2@p@URM4B5tX<t_NeRa;aF*Qqt_kf5b@q
zf-qY+Tr4MAB2#tXp$sCsb8Pa61#?lf7wP}Si1w<o$)hDw^p`OjyFbt+q<fffiY7*o
z|CvE-v})~IW7MH^Uh`7@poe3;lH>5y8svXvuu7lnmH3|jDn`@y$xXVPq~=imJH}eE
zq~sh*szv#=LDQE>m)^IP7;*e2K<cilbB-3QT2&n?W5tmAlK(b9)KLAn{-@Dw;}&q2
zd7XdB3q~EZ&hxtf6T7qwY-HTrF=i?c9|<ta_ES=)vHVYf6b^H2dQP<EO4Wf!L!9cm
z(&Z%8Zv8$)!xH)WD(eb$oL8xK>#-P1$Be;snvSyz`X;mj_=f_t-g@P<+UV*Mbs$wr
z9uH7U?gdG#P5d!~Llc(S4O|_kXrdK+A{TQ>1Lh)SQnMhIwCeb$3?gs5I*nVZ9eFZ9
z>aCQFQ$Z_9D-wSWkg)K`k3XE!`u3LqsSgwA^=f!sL#HX2s@;FhL8H3ni^f!<J(Yvj
zk9%TEpcUj9ecAq&hc*RY)6{q1=?HVQ^miaNukrT)jShMkZ#RZ;?pUccm(-W%nHce?
z3a%}+Wn^l5=l=+?P)=|ueF>~oJ$N>Q6=$4{<Ai$)1z$NDf&Vi=TQ)jYvgJyxu{;-I
zsmp>($E{#CgDkZF<?vcFyO-j|Q=4$^VZtd`FcK!KjORVd+~vnPuie9hF$zYbbe`6~
z1B@BY?@5&Mf>Acj@CvDKa9dPq&8@d<%d0~k$R^!HW0NZ3(&Nm>p0O#|$(5S*@A7Es
zpo_`JDYffoqX>m{N<020r&{%mJ<eikTw53dmYkJHl<iGC+L9$@%66ibZt5}mx(E8_
zm&l()Og8f<{rw9{U?ghk<^e{jTAD!FB0!nxqtbafOXSLy0m`g{JWQu8L8@-PC`4gN
z)+tKrY@w|f$`DSb9EDwmYIMwmMv1s3TGJO>R4ih}$3k@)yO{;^YulpH-rC@l>zSu|
zS}$P;{o{lEYBk>{TIc01(I45yVVTCQ?2h7s9HM#pm*!#Nc>bQ+C>xM=npdeW-?kR9
zg@u(W4l2ia<@Gx6B6p0I7<Fvt(E7t-zoxOSOiwwr60v=R*<XQazdpd3O&fL^%hVe9
z%RC}874pTiQ>@gPhdVedGBjB{IyT;(i|_ARR$QjEHoV;6*?F1LbIb)RjdWJljxm<r
z7~V_P+C9~^)l%b~f-lW9;JzZlor=@QQn$tI6yedl*OZ(?mYDf^WrR63Cy=H3i96>Y
zmOFbag|<r;Z65A__LZ>XT9K-SyXK&kJ#Q<u7V@erp1$-jJOZiXIHpo`S{Ab;TIxiv
z-SW|<<|MJi7i#x>Tz#HbiPDq$UhI*@TvTDpXdQnq5^dX_5!RHnGX<j@T9towjMvg@
zlRhasja|yZbFT=k+?rOZSFv{vQu(vFQuS}27;%HHY`rFnM(rn?MCdf%QuA>8=Afze
zvjko$Enl0(Lyco#owuBcHJ|+=v~p``sg`8_2&wEzYN>U>0}K`&MLlmx@5(ZF1f=Hi
z4`evq7@Jg@1J#i+x@dYs<=1qExNv+ln$GI9+^5p;Acl18;K_mU#8l$UU^uw_pj2L^
z<{W1_oS{?H&UD|*cV43kMh3L^V^)k4U-{fQT0vi#&QF@n(4q^rI>#yKd(b-S!3<{#
zZo^0*9m0?@mtA!nr<nz_Coe^u<~XEM_6do&9O@C~H(?MrR4LQTPt6S;=8?LqgX1hh
z9oN-@9vZC?z0RYQJjEx~e>vQvVCbhe+D$=QS1>C`r!T!eK<sVS8py2@Yw5ERGtx(R
ztg_a21+#UuI`syRvZUH-RaUb}mySzBqJ$rr!6|cpO{yO@*CVN$7)&nJ_@}nhn5D+e
z^E@6c(AVqPV$pGHlq&V}1HAu@Qr{gQW~0|>YNmP<3p^I~!wlk#NdNim7$;G?7J8f&
zm1dcF+f-lTD2IaSzIpW;8xVI~CQ@U8MKKQc%htJ3R_AC1<6&BxUu@8DQuYQrG2!R{
zqtuBB1)~}|OW+uX(mJIz&|voO3r5FF+{&IB-}E@l^|g`O>7`e(QdmnY7HwRZ3arF8
zf2qS-A9b5MuD6LV@Uam}>3ix5`h8@@(`&KFmU3}s(x>w}mG}<7(O_Xiy`EfR1H+Ef
zub{+}?Pj0HvCAK2kP;(~euG3a1yO<MJmnJOsDT)#{5UF+Ps<FNvPF%e)oDJZ)-;!A
zaN=lnikAB7uCQp;>X=$7Dp4j<t3=0H9DfWGHM9ebB^5NlCBF2Da(KMO#Jo#ns?>Qo
zOq7fhEYA2y=`>9BDc<C8T5C$jC(%-zXmFG*bY_4(m8I{^9%;N<vQDLJI^N=Nct@<`
zm?XX!D;dI+*mNYmgeMuSz7@2f1gnQudr7rSpC$2auLM{NdrQ>RL=74YQEK%veg8y@
zFcjbfiPm|GkQnn0TbxR*mFebnj+Drsszs_|9(%z-2baj71j?!aWo8MxveXRK>Hy{7
zSsNxUYZyw;21~J8fU==d?BoEYREi}=eWx&#Wphg8N}~O&JB*=5bNDFi$Q*C7MxgUL
zlc=2|4r_I-1+%!hV{DsB=SYc`dNe{R+rvujm})qzk;dBUKy8GZ!F3+1MEZ?6Bszp|
ze!W_0`a;n;QsTRCYKYVrD>LJo=<zf&I1ASMD0SRVNR)_HfJC<nm$Qyc%?-74u~=uj
z^Snxo#>O*fy^V6G!ly<_YxB`q<fby$)@9H_Gun=^66v@;7mG!#^IS`m$I}9|g|&Kp
z!^Q0MT&yxNOU=%|H9*5M5$%l$BiC_CrdImTi1E5-b{`rK-8)AtXoqPR<lACI+QG8A
z+Ei1(zWM4Lv!F#L8}Bn?%$~+-TC@z0*XX!DBuePpW5g(WonocR%vmwk@>+ErozYMx
zH4DZgbb9+c0=&g7Dxw+V?vC>>wfErc0I|1OZDCJonb;Nd8_9a^odH%KMks2UomKd$
z%)4^}G%UzXMSV}yzIVlV{|mM6+!%9%*S>ehh#Ro>ofl(GMeR%E;Cli*eeCg6oXL{v
zlb!D{sacuZYE_#h_Zy~GjNj|fTI|NqviEnU+LsGFV!P2SKlhQEQ+}VpqCGKKeJ?bx
z>$H_g&6i)uFiYDF7R<OzL)-a&h7(52H`X1;E3qnmk;SWy^fp>$z6hz*`+!FoXr8vB
zQffA~Aoa)!>0*zwyiy-$6A4q9W*_t@VN^Q*Vki1PmxL%Zuy2gB4C*vSsXpw79F}jI
z%HJ=XnzgvpBTmKlI?)^XaELS&yW|VLEo2RO8AB_7_h@QW=pzhiyjkvk%+&1cM?FqS
z%f6J|!p9iORNT*(va|cR$0~V$U8+ufB1BnHYp=-~Qgz;9BxbWN_lQeyh6{DP%gmc4
zO8F-lQu+JPQu94mc%)t!IJMi@?OSi~ORhR@2MgjwBkNBwG{0bNfc*!!<6KMi)volI
zeN%XcOF=my8>mln#Q&xHR6di3S@J%WRDb)kF<$B0O;Q%9S9vsJ0>5ET&i`BnZ$qD)
ze{}}2{K@&L8JW*#@XFW1)cEC^3|?Q)f-*He(IfeS$8y`>!wJfc+q8l{@if%-YeO_F
zuWi_}`Na(0hAx}eWe_)P+58g2Q@1)y!FN4X7q0hs{B-?N*1oBp%a=2F8~SYh8)C$=
zXX__svcHnS(r4?JJLf)?o;Svb3&*i1RUIW#riDq=`LD)^8~FVBn=*Li&Yw^92)`C%
zEvmH#U^n2CveVWx^<DUSjJ9~F)Y;{!a`KG~79EUI_6Fco-}B}eaf6?*|IG~Il$@|%
zFvF$pFv}n|hAVYgk^2JEd4E3fb^lh3SN=r*MC@*j(J+fw_I&+RPxsp~9(G*SPOsoZ
zTc4aA*T2Lp*ljUhKOIg~0xMyQ^qmYAHc)t|j!Q{uzT@^7tK@0^sdHDp8)LQ1#=k|4
z#wlguSMZgkU50m9Oq`K5KboGJ%JT6&hGSwiuTjE=C)HZs$*}rs*v(&^3dGdB`S%%O
z*|YCbv$1zsoW`m)4vnd{ht`xS7m51(1B=wIH1XLshktpnj>|@(KHu%I8e=(7o!94r
z6@A)m|3iZ{(9=J^S;a9AK1!YA6ntH1C*D0C2c|6^q~ml;VEo8qU^%@6MnMmiR%(81
zF*4WUbsUp|nJ(J<^b?EHJguiObV_s#LdQsn{J+;AEgQutvf*I9^ZZZ5=%)^+juT8u
zpT1Er7fLG`_c5gM=VGMBw?E4wmA0QswRZOhcu}F~Jhu`p(9Z*;DL5S?f%QOumD#>C
zrSbZO!)jKi#Fjb{uLmubI_JE6yiy~u{|K=37q@(_rOwoSs2Gu2kuo+@1ueX~bu2(V
ze%bO&vQA~&_)CX2wn6vA|7VO-eouU&#edkKEvL&h!$X5RuWJdM|Kd1r9OtGJQ;PE|
zgF_|_Wp^2-=5PKx#wl&tkm^DInxPG-zD9}uOu~BmHx8#crf%Xns@@nZb*4}1i}~9a
zZSiP4Jm|b^6wETvy$Qc_c&!as(jT!XIAW(7Zffg1Hi;VbKOqj^q&1~DkA^sWo7R-#
z{GQ{4Tid1-=dloHgQeRaLY#8xmMGPaa~!_Gt>aWnoh9~1gHjn}AM?%;3dYoQa{Ut)
zVW`0%I{kvD_EG=IBDDC`KvRkEq(x{~*wH*4BNVjI+aZ<y%n;Z{)+x=2zc`G>s>y$6
zQaSNgk5HRD!mNV6A<Zy6We~8rquwZSb4@B1f3q0X8awu`)7(y3tvziKuy?RkTV1A1
z6^u>joWQ?36jt%3v_w7Q5jIeX`iDVS6*N$t=EclJ&3o1&)ElK%T~a0LpB4l6l5CiI
z@ti?eU2RuK*G?WIl~VsQ7&R<1GKh{#RBHC)d5*Dis2&`+);UsQ4dCAaQUj*~O+M99
z=Vyy9eczT!wUxNjH@H@`(~_LB9F-QSHC$U=ZX}lImvlLl)>vhT73Yp~CskuMvMB9F
zW%BuwD$N@kgnB)I=sXsQ7I+hj(O?Igc8-wfU2SR+f=F~+suEw^%?!dw$uDl|tG>C#
zsE!PcuwxIVG9Ftnj8Z%H6Ln)tk3q*B_=4MUo~KglMIHqks60rgsg)WRY-KRU_=Soc
z$0Ak#UThFf#pVDOiH<Q+E#lS&18ZJkcd>JnL=Aa~L202=I#}i_lEB!;VwC;9Ct9nQ
zS`0pn>NurRt@^eep+tW%)wABtB8-mVL)$Kal8VRn7Ufi4bUUtpiC*c;7y^q#$GMSe
zM|Q9nD7d2xq|<n$W;kDNG1|?c3a+u-P_6rp3}+29p6WO*i8}ZShOwdIv6IIb9xwAH
zOnjwY=`rd<{8F6GOIHGAXO6-@xSgXU@?;l=QmO}^z}S^xtSeKt66@Ko@))C;iqd(C
zCGuq&N8-hJN>O(6D9s^WAv;G&<jn32rOJk+Q(5cx@E91pO}@94YKiys80|)x8x#|1
z^=gh$W?m~%w)P4z7)a;2o<P}~ql}kAN!S4G6QVGfsVrTu@hIbCB}!MSFTbzB2qv#P
z&XH7q=(Qf9Og|!#=lfX%?61Vx^}ZT*9FNp^VSk4*J_c)m5-6#W&jAJntL1!hqw|;~
zYWsl}BbwgmwCtwhG2LO5j7Q?Tc#y?tubPAAw?0&^Cy`iponesHwlYJ9&T}SFl4m-c
hsM|fID6<U8x}nwv?BmVmDCKe{QI-#8D7=sN{{c)9->Lur

literal 0
HcmV?d00001

diff --git a/lib/vulkan-1.lib b/lib/vulkan-1.lib
new file mode 100755
index 0000000000000000000000000000000000000000..005a5efeea0c1bfcdf465c5dd7b3dc8a4c350b89
GIT binary patch
literal 38268
zcmeHQU5uSo)m|1vthH7_t%y>zmLF?vOJ`aXu|Ly7?ca2!f0ROdI_GWY&@<-@XU<GJ
zg_i#!P@q6TMEpKu3?aq~FN`r?j3LGl!v!&3aOL;K81uytFT60uc%k33f7X82-tT_*
z%*z??z9x$^@7incXYaMw{#$#!FW25u>x_)vI{(W{{C}&~+`Ou9W&g_lRX%^EeZ2Xm
zzSZW>BlC&g_cfxeuMlnfHPH?tI&sm^<PedfGm8zKJ4>YL@Aw5he%jDCA12cDPy8xE
z8HyhLhoJ|z6Dc|l3`Lz^7`p#TB1QLEy8mT7gYJ9D(1Vaubm0v{&n+iXbh_WruAdVr
zI{Rls&(0%Kba92DGZ#>A&@<-@J@8v1MT4&z+P#2C(F6MoJpwsJkE*(Zp1^+;J^On@
zjiW@0CQwI3FB~-V$YSIJy|~a&YaNlIQ%ysoe<M;fvE0zZKO|E0!kdPk{|k|#=DUVY
zUQMLv>7|D1e<D&ee!rn?50RpA$Sb<%XNI~{M2a4L(9j9wSJeH3q1NlD7wFXQ3^k!E
zMN_K{o&PbBqGwTWMfa>XbgZ99(c~`;HG!dM>NP`SZxJaviN05Ke2JmX0Qwnp>_>*~
zeVa(p^Js^nU0V#DJ&pbYU4(5YYCuL&_fkV+2T?xg<at9+zYM*AvR@gx_YlejJ-@)v
z&fgFzI(xaH!5h)vpwmAv^c*l04Wn*~Mv$hccEHe)KM^T93>i(!@C#C7MN#cdLq~r?
zr0G`tf`(8BO|Y>c&@j>!-LcRTban^mAljnnjz1bYu!2Yv@D70X{?*XFcZd}2x!%ye
zqj(1G-)LwAI#6_Iv7x<aucp)Z1?{<L=<c5qDLMk3E4mA1Dyl<X(IMa|+Kn_t2j4Pu
zH*}+@K4oY>%20IIF+;oGg*@ocIYW2$6KQ%4zo6+sLr;C5NYRsvETPU%f}Xm{P#g7B
zH2q6M&p=Mm&O?SSpd3Yy{oPUvzo3Vg8G2|rk)qMd3_bKZo<XC~fuc5W6pbTY(fOr{
z4j<mSV{q5dp2Gv1hc^vv9o(~P=<w8J^F*!nrB!tJ@Xo#4w-+Vo0TwX=dv4#mbKBv8
z4SO~qOJi*O@YIP-ovhZ)wzej^wbn>R1qRCvWE0&^ds+yF9G2YNnjFhID0FaidZIB>
zYYt>njgf2uWtS##-rKUSpKA+hH`&pd%hSqpw>x9C?qH`qo^`s7l2SsM($cn$)sAOd
zLh-2+g^I-mOUq5yPo$QXuzNC_%(m3V8qH}T9;sPeu(aGA*;u<XLy-j;Q5stdDmAj9
zUhlvJpsWH{<S@44%LhD`HkNA=o!#0;ma#VKg#2fga%Raj+kC2<wI&+vmZ!~Gi51E;
z*fW$^E>tLw>$tr(opsXn3q=ZL8Q4%vjMKOPmxI1E0@|!h=Sl|4>Ez+bqhpOOgyfSa
zvA@>nZml=fB&22BGfSDW9ee6Tz+kxz&1QQ96Ob`Y6c;d9Zc8W2WyAxg@2HKtBJ#gE
zb#H4-q{}Ck*wUPw7)8eMtZvE~s<n>mS*R?9b8c--)tZgEYC|l0GMUS8y<@O9wa1|U
zv8<)~pgfIpZ)~*c8z+w)%Q{X41(uZ6bDk$EIV=fWS?Ex9?_{HssY<EkA(bjh5Cs<t
zC`%0U50BS66SntDN+}f)W%;X>$)T$-n{^^3&KP<mD9syT9vgzgl4d^KlF4P<Jh6nG
z{aD0%WSOCCBI{}i)g$tWCD2PceYlII*>;;`u$&sF(1I(=e`YCD>){MzX|&zRHy}BO
zC2e){1U3HkrX**vtSNQ#l(BaC#1b|ui+GPLgO=)a*;2`1xnU~@g}FaDO>2-*!~>_B
zx}le`&bHf46cKV*(pFK57FaBcnmajwPBwXb6}X(kpqjzyDq$a4#=<!v|Cyy^N&6bv
z$)GgNVp*>QD_rEUv?*B+FIT8}VhNj<MZ8Crfo^m<7Nkx#(Vpy#WLN>)lXhM#N#oq2
z4qoGQwpiQp1ZS@~%P)0!v{uJ_+pbUQMbJQbOqRC|^U_@L9+z{N!Uh}TS+mi~Hq}N(
z^Rec0IfupY6CH@p<~-1?moQN4*3eZ^Hs{%qb&h9VE;XsdxSYeZ<+cu{jK;_WXH~h%
zQv94)3dbjLKC#NkGZqp!U!V&hg&daj_3R8Q%H}*lSuRtY&$&#I!?m&TW~O%5{xeJ2
zGIFti!Ez=y5=T0X@ou|=iOkibJdJbPLgU#JiJU(~@TS7NRL$l*g<@@{IG=OrDL5$6
zu5$sGbMWRdPlj%k&6Ff?KDD7N<cTm@e($*2?-g=H*__AdZA+&%mL1ibHD?Es$I>WU
zo#ADYGi1qRc~kmO*21hbSewYliD0p;EnB3xq6LqoF(DZ42Dl$EkXw_@TBp&SM)gaR
zIIpRd+RoNmb<a>FkELzBWRk0xWU{=r51C+%L$X+w4Zx<Ebx~SIkihwr5d=w7Cn8Li
z-<Tb5w7d+^Q-sO#n_Klzy1-&t*<QD>N0=<XX{@gHo;-IddkZ*15(7moiR0~#T3jJ0
z8NS@`NMmB6t>-Z<#?QXgcC20d8};s}D=z2wk^|XzcXVT;rj1sYm~(u|jZGN%rndHg
z>2Yo98L<64rmXg4s~(nbc)s*7ya^+XCi(=24xH#FPPpnvlA?L{V{}Aw<_Xb!Lm5tR
zd?YZ6HI>N|L_XECg$c;^Z@>?7ZN(gM%Gr<2BAJ2wJ~-q!gH@pd_t_%PK&PfRl4EuK
zSCFECvbxc#5@1GA4#&+AYm8RVaEg-IsjQB2iW$5FnXfT1Uh9rn+Z^Dhh(u7Vm2_N@
zV^e#4TB=n%2U2=dX*8gO%%hOm*o1dQc`%Jk{wQSf()YB5Y>^%0(XxBmVMTqm$Ya(t
zDeiHE<d#OWDdohoklNJDYMo8(X1gP69WX<AJ%;W03$qo|!rU9WUF@ORa{+_bb&y<<
z1M_LSIh6?%k0T^8LGoeIOhe{LNa(p)$cSelr4HL#$PO;?ETniA-&pH(8fFtY=B9RL
z3VFTe=p&>xBDFA?XCZ~PM##7)A))K+j-sZlfG4E&6t|}_hTRJ`yGA<T329}lu4oi$
z&%UaMVksi$AiQ<KI^eT}tk#6s*qCKdsY5V@{LbuD7YEL9yNepDONNkxwnPECRG%ef
zPh}%G#@MCacJ$_e$?|p|G(6ckRvXE--97|87I`eK4s71-jm<YaIj$CSlYJIKGnKcg
zHeNg0z|4fpnRKC{<V+<S|Jw|O3Za?G8|=U_*6QMLK2uFYNiNy!TZd29#z#hLjh1O{
zgvs*UJAQ^3mvf*)T=9CY$(tYHbFK{|+6k)+QfD$1i*Y#zhSy|fn`xv&4oenHM6HU=
znz&Lb>Gi1GjHNbfE!2x|`(`M*C@jind?jR!KC4@1uGJZ(iX|!oid4~-R7~X(jG+iI
z)4C{|^FaHuhT#x0*8sLdR6|1nF2@5IG%69(NHW<k1Foq~R(Hl|HE9ZnrTMw{H(EEX
z_GU1XGvsrwg7s-ZYq+&8?)TF3SxUTm3{$_0Vfr2sy|R_)*W2(1LYOX|Km^7l(c&|R
z069nWchKp_5iRgdqJM%2^p8i0wm*n`$BBN?A-WQg4_Dqt^fG>5!tXDT_QnOG<<AlI
zBZ}bXyNLdLmT2CyL@N-jZ~<kXdxq$@4-mbI=!6Bk5uJg*&pm?jQTC%x5d9vqM;j<}
z0udE2pgu@j_#)9dL@zW?5&dlx<s<Ef4->tK^uIh$^lp>r>XSrEpC<ZK9T8FEh~mh=
z$M4VXLBvfLQBf#=Dcbl4{QWx0`W?!ehkUCMsqy3UC>yxzA=`h9=$FvL6w+Qp+FN62
zKgwQ$HV>f9KSKK3_W}dBThPAKXx}Sn+uNw~r6~I#<j+I?Wz^+YsKX)1E`aQBb`o6<
z*&EUBA3!&+Kt}_^cpgCnOAYlt0^VVwWuW=^`%UnEa+K&+(4isd4D=JoFT4YF1F2}3
zKjL@A0ptVybuZC7`-raJ1O4H5<9^fuWiLiV$mK{o4Z4W5pWcnSqx^U70w22iE7I>r
zKfHzV29P!d-cOPD7}DPb{~Xf$?}RQugVRLce~M_)lPDW>75={1hQ7dGgnm7QHV$5Z
z?L3C4DbO<X(ej6gE<>Naj^ESxy$bS|p<k9l=U;mtBI#%zy-0^>H+_okrX_SOJwi{=
z32IV}#%PMp(0z12b?GEc(kXhJIy6BoYSX<m4wHF`&eLgnk{*V6K1h$zSvp1;9j6DV
zP9ro*_fUh5(lk9p=cu37(H*pfcG7nG2z`gXODpI)+C!V^EA%zGls3@qbQA5PZ_sl3
zI{gPdPyb14>1H}eyD&f&(U<9Z+Dl)hTj~9D8*QYmw1)Q6Hu@5+r>E%!T1<!OV{|2b
zl&+x5=reQ|4bfHfIl6`x(#Po)dX>ISE9pl1JT0YL=)?3feTzOom(vGn0nMim(HH1h
zx<Jp-GxR;$M16FCF4AY|(=<TK=q36DeUgsQois?R=>}RwSJN<k@&7gVo;7y~qL1E7
zG!xJ3sl7>wPtIkvp#j)N<&N$N7Y)U^?L{`{O8DB8*TNi7!#qgP`>BAhd?V3gQ+T9c
zaFqdNmDJd#!gs*#6xSG=#D~0)U+Qa#y9U&0y5x8p9=}D+byE5?oqITBKF4KGM389C
z(lMs<Lj;M1L!n4l+a&F}iSVG>sEbJIGwpLKbr=~>RiX6Qk$KjqK+%G@v!vwY$z~Ec
z_LrW5^_e~Hp>W!kYJz<32N4y8*JBp$R^`m~Gf6kBpHiq4Aj(|DaaZ$fJ+o0qD$s2H
zGXJdei;H@fIl<!2n=%)O4(PN#Ggr7^dY9B@18rRh-u0SG@vln#67xwZk~dFO$~RZ+
zVRjW6ns=(MWpKs`a){%KI0^VdLFemIbr?#3RDS1Gyj%7Wg4stSXkJ9Q9IzLF^LR5?
z@qY1~7FAkgWNCciH}Xs4yWfE8LT&=S3_>U*6;5w~gmhN;No9vLSc$rn3=G!edP=n^
zF_X1P__1rdXOxB1qE$K(&nzw1w<^o@&5D`gK9-eWlYGOG%vd7}reia(eK?Z}_`XdA
ziGfks)S^|{gos($l+do2T`(+8h*=h=<$lRPW6oKi93FX^>LE2R>VQ#ZW0ACW5tx$;
z-8B)Tv1r`93>&^~M4>G&%(fuLeKe~wS$M`$eeY|&?#&c9nz%(ygdV7*V4DixiQLwD
zA(E^E<zLdV{U)zcy^Sy}NWkS~BESnNFucip3CLYm<*u+?M*E`5yjFo&)Qcx-qldSR
z^b_n@WFtz}<JdI2Of_1>L~YJSun>(VD#2zG)1q%F(%<L18AWmNES}%ARNB3eQ7#d_
zAyF!+HB<<cEfqIf%v<GV1N4XAq}j-5yDPjiQ3`H4vSN+x4kclse!Z=H57!dq^*0l&
zaCnxArre$~lZ++Qfqo*<Ehl^fC(_J{5=Q<A0XOAqb!#Uvr_L(!9WnF9R-A>Tq6xJj
zs1vs7VwXqr#VrR{LG$g~3?8qr9O>gxd_j^)-Y!KaU{Q?iV04<l4bdzcV$GGozQz}d
zy_=8>D^o3j5!!MQ=@!2tvc43rh3XoTy(oDzP$ijQdnX#i1Sa}Iu01GGv9vL`oW2_+
zdi`pc$XTdI#A)J$-QLP^d`F34`@s~2yMHYdx<ztk(IH~eoYTWxau?$+oS0TdE~Oux
z<HJc6Mxjn|&JOQsE4w)i*NeA7n(|>7JmKpo=J7esk)g}<T^NQH+*WI*a9ikS>9EiY
z?jm+o7-sSIUW^wlqMw~kOl!c)T@qRfwi-Aim~Gq*#h5Zb4$O=_&XYSGn8WS{_dH{C
zSngs7^5o71PEe<aJlxK6C}<JkN-)nVuFQ+A&}T`zeTIm5)F%{X*+H-8F2VDi1)Pu%
zKu<nr=SQ(Ti#~(7M|191o5IM&XW5;k>k8cpb5`RL<>8-s#bZCGxbdb-(odc`c!#Ep
zN{g_K#v|?R8H|NJo7CCGsf&g9xaFSRIm>b60ZT9ohjy1HkL%0~59%&X#2E$pEkCkb
zHCUc+7M7X!v+e!@U-rLCv<6_Gxi9S_cP2lIBe$zCcbb)vy~eEky=TkeenUqF9PY`0
z?Yh$PWnWsB*O``=_a-w$cQVWGPv+)FWmiYHV={vycrQ}D&>0+z3-5Twj;{rw@YpH@
zcmT{$4vQs9^0&ll+fa7sjRs8{CUhn5cMQ{Q9&lig8w~FRXQCqF$6Ar*j$(p`uE$YU
zhE)^~u`)G!d@Tq>53X8D4+;5lnjRFAp-tX2yVyvbSb9L!2u7rEG&N~<?D(4Sd$0UB
zs*8vVdk4{ooI8TX(!#@ML`FBNaO{i-8pVmjWJD&Nsx6{$$cz+|I$p-a%^VOOEXyUu
zk+NJUj9e?16vxPjQ0f3_#0y79&302KHf8xMi}B;4&fum!-K92~D0xK0i)HR`7=Ovk
zzI-kYgq9<Yg8obF;p^)D8zXMj)VoiAeblUcs<Nx2=JIl<AKXC<>m(=0L*3Mg=y7i9
zgny7L{~8E?CS*mu*;J8DGkuY1&CToVx0#O2ub;bwo_^tf4=hAP$>j(pQvX)e@jlY=
zrHGj^|2E<WK_}k7^|WZ?3iXbAZRu)GxDtQ74f=y+T!P>AwDJur*KgT~>cBtXv7CBb
zPfOmIhu<p@)uH|^kd3;Iu2}Q|Fn^<ykci(i(;wT>80oYp+Q+&}t{=K#$@Z-~x7l>>
zM+vK%C_<!>{%aBb`+ohGgh4Gv@RkRvduIB->0kenc_2)xe)EW1y&fT3iZjX_-9Ce4
zW!5Meh~zSTIY;rYRsXU7R9S?Gn*859#Mt0#K<_EqV(zSEbsvA&OIgc2Z7u(WdYSsh
ztp%Y_-alb2H*tcpme=^f{wixx<<7ko%%*2Q1LV?=@ywT&GMiDhdR=jp{SOdQdP|J5
zKC2=<O4TTn|JKub9%XBouSWAb{9u2T+}!JTHoE;RYA|;;+1F<$uqEmnS!$%#&#77L
z^SYgvViy;6df6{7sd<tr_h%n>XMnj3nD4{9l6%hVGGW#ZY=I@Le3*?DrBLU2sQF5S
zb*5KT{&-)|R@IA_c&NN9@2QPz;=TDfpk1obA|YH-k)DcYrBxfvyCi7e5+J_cAT~lb
zW=}2TDKx;l26&e_JiQN!DYUY!uDLm`Q8mMTK;s1-SlPJBXnl$PyWB&QXK4v3J9Ae3
zw>I9sFQ9!NbcpD})S#HtcFvcm@T0nTKH0SDcABu84@tz_@gdb(y@ma8y)K1b7f8Io
zt1dxhgJ;v1Yf9$TtAY7phnc(Vd%;Sr8w(k$cpo4ktLro|IzHlI%6A|oWL1skUt;xF
z#~Yy0MIK`0DnkNSZfk1Z`KUt{7bQKR^(CzJV?{KMMV0ETJR(!pdWFQ)let&C_b{80
zk!s(S1+4r6Nh(z~c2<}rE5~)PosUb*;&ETXR=1pz8U541{RHFYcV9hWrDlOohFJNX
zSWlR1Zi>&WpG5l>J4}6V*Aq@(V(0cLiKEvjbt@?0<io72rDX0_d*Z7iXmMp$Mop}W
zpAJy%2LNOv=hQZ@X1x5iv?nb7R(5P>`Wb;WXFI9H>hxKM7hd$rCRXje)EaROBg$K3
z30Ar9QsZF>qj`7K60U07Qaj;m8IAX9?gG3g#KcMFa}LqJUGD`i)q9_3Jm~|KVAXrC
zWY$!Ev!x!ITaNq*tvX_2Zux>h6k!LwwJ@=VS;lxc(1$?{611|FCfazNz!VV^60W*V
z6TNtS0nf%`^n|!3v75L-AX;~PPgu@98adnFD6m8<Nl$pG5%@)c=OR^lLhCQtPh5`?
zxLl$|;$S3jWgAWGEWT946+t%tF4Pr8R1v|`6KaW$mACE70yPg0>Ip3|`d2cVAA2O>
zDv$n@y{=+B8QIhmT3=$WS?$n*xT&5HQzyck45AJS;;TdnR(Vt;R{J#tEEC+*6JA1_
zeFD$B7?psPttI8}?)MRcXf+92S+9vzc&$Lq-HGP5w-?Nmmu6iQQ^Z+G;L18qS?tXo
zvI*jnU~{S2uSl#QM6egk{*p7za?Gb+En*hp4|~B*?L2M~*mJS-NMWh(GH^_wjkc4`
zs%$f<dG%I_B}4goLri$0zAg|WVV^zWrnI_V;6@yL)wP;fi#LQ=#p%2{USg-QQR2DC
zN~y}qV?S|AyGfw&f!M}qO33OhOw8m15ljvMm7tZ|ndrsM0&~vJsEJ*`7J)ZcKBCkP
z?lytvqGF{otM_0+o3~4}Jmj`Fypq*I-9B%XcwRiOgsf^Wsg?g5K4LNWTLM?tZKAE)
zB(fJ;E+MP7wXftPu5N0#3&cX)x&*D<&XmRO(5TqS<x%jx`iv60<ef#dxtPZj{kTga
zmTt`FvL6Q}Vrq+AxgYtfYoRy%Mp)18IBLlzxoUe;7Bu7|7dOe3+nL&t4@<Pjwy|={
zN<1s-zI~6xifkLJVx|1ydp)eOZDUo$lqUB{#K`uoDpu;QW`6;zIP5FqC051*9$s|&
zR@o|3p3OTX9&g}!oktUUw}Uaf$cC}9P7~|XoiWVVhOxS46Fay=618N@Sh<yn73waD
zSGHxW-onI5<8Fx<-85EiUqYXUJ+%0yu`*ubWOKyBE8R3!*Jz>-YaXU}#jK}YR4Pv5
zsE1g(U98^9gx!o-Ots)Dhq3wJ4az1~?#G1P)HPc1-Me1!)|RwRy+V;$JnsWG(HHW1
zsX>3q9J>NFI2Nlx;a$I8(ASpeTLrBj_tE)X!@1W$$^K_0>M$Cw!<_0Mb)(ks@%=aK
zdX4E$_|xvuxTSAu$}X=yq7!G869zZ(f7FqXl}B{~vFQ-=gG%r5@@q;))m5;mF^3on
z?XKFk{*qPTi$H2=q~z<dQmvJHGUc~uGj_>0rDQwb!@CzHtyXa$<34i!LTgWLO~nn}
z>my>1rd}MDaPna`adfF(?KrgjiX?w;RlhPWp{w_5iS4QtVIshezw<2NtG0Y?$(csI
zCf1Fi$6u(HRj90iR9{bW9rVeY-_DkBRa=}wn_@KYUGQGeQXZ6(j3!<@@n1CW2|2OO
zp7M~(-(v3xzr?as6#TT0UwlPg0#~=E#OdI)LoU34-xKEA5^byZN$+D!e?O6Uy<b+P
zs(r65nXA<cjQ6K&;(t3pR-$qrq}Hl4@ml0xCg=^hWS_MP?LS+BT=?!mPc>Luvj0&p
zC_fOdft~@1UsaIRsA>oO2|YX*uSM~b4YC5&ZJ@*w)n4hLcn$J})nCvnW2gGy;W)P5
zM-)FdAyrVd57w2OH*bLk9*Nf?`7MiH>aw<ER$Pm^JX%thk`HCbYE<u;5}l|s_+!a>
z$j@-dDpc*2lqSw4Yf$*chpfh&YU1%^J>(ZeWEHAvB4wZ7jMpIfd6Hi0l6u?f30Ifm
zw@-S)EEyB(J@zLV)A@iAR&PG6A|WgHddmOs6k{4caQKx^*}dNUIGZz0>L&O+<Ho)T
z(_3px)<P8v_OwP#e{n{F%jVCf#cN7>_fx=rMq`W5*+`(ueVX#rK5LL-|DX2WFiZB=
z>Yerr8dEQ5@>4wjHoWI@Jo%xYp72s}V9)1x_FJ;jHmdtKQn$@7<XCa1cXewi851hv
z<)TK@FYtJuvG9hw_b{80kh&>-v4GfnY;9^hyp-eRF2EvWWv!-sfZxh7=i&yx<jzvP
zEBkVe#NVCjZ4{K)%3@%BTVug{`fqO2D;mf9WL8gYO0D$YF<9gHvQ?Iw#~gTwOHM=T
PiyW_NO#f?N{(SO(CDW^;

literal 0
HcmV?d00001

diff --git a/meson.build b/meson.build
new file mode 100644
index 000000000..e609fee0d
--- /dev/null
+++ b/meson.build
@@ -0,0 +1,11 @@
+project('dxvk', ['cpp'])
+
+dxvk_compiler = meson.get_compiler('cpp')
+dxvk_library_path = meson.source_root() + '/lib'
+dxvk_include_path = include_directories('./include')
+
+lib_vulkan = dxvk_compiler.find_library('vulkan-1', dirs : dxvk_library_path)
+lib_sdl2   = dxvk_compiler.find_library('SDL2',     dirs : dxvk_library_path)
+
+subdir('src')
+subdir('tests')
\ No newline at end of file
diff --git a/src/dxvk/dxvk_adapter.cpp b/src/dxvk/dxvk_adapter.cpp
new file mode 100644
index 000000000..9d65bf38b
--- /dev/null
+++ b/src/dxvk/dxvk_adapter.cpp
@@ -0,0 +1,177 @@
+#include <cstring>
+
+#include "dxvk_adapter.h"
+#include "dxvk_device.h"
+#include "dxvk_instance.h"
+#include "dxvk_surface.h"
+
+namespace dxvk {
+  
+  DxvkAdapter::DxvkAdapter(
+    const Rc<DxvkInstance>&   instance,
+          VkPhysicalDevice    handle)
+  : m_instance      (instance),
+    m_vki           (instance->vki()),
+    m_handle        (handle) {
+    uint32_t numQueueFamilies = 0;
+    m_vki->vkGetPhysicalDeviceQueueFamilyProperties(
+      m_handle, &numQueueFamilies, nullptr);
+    
+    m_queueFamilies.resize(numQueueFamilies);
+    m_vki->vkGetPhysicalDeviceQueueFamilyProperties(
+      m_handle, &numQueueFamilies, m_queueFamilies.data());
+  }
+  
+  
+  DxvkAdapter::~DxvkAdapter() {
+    
+  }
+  
+  
+  VkPhysicalDeviceProperties DxvkAdapter::deviceProperties() const {
+    VkPhysicalDeviceProperties properties;
+    m_vki->vkGetPhysicalDeviceProperties(m_handle, &properties);
+    return properties;
+  }
+  
+  
+  VkPhysicalDeviceMemoryProperties DxvkAdapter::memoryProperties() const {
+    VkPhysicalDeviceMemoryProperties memoryProperties;
+    m_vki->vkGetPhysicalDeviceMemoryProperties(m_handle, &memoryProperties);
+    return memoryProperties;
+  }
+  
+  
+  VkPhysicalDeviceFeatures DxvkAdapter::features() const {
+    VkPhysicalDeviceFeatures features;
+    m_vki->vkGetPhysicalDeviceFeatures(m_handle, &features);
+    return features;
+  }
+  
+  
+  VkFormatProperties DxvkAdapter::formatProperties(VkFormat format) const {
+    VkFormatProperties formatProperties;
+    m_vki->vkGetPhysicalDeviceFormatProperties(m_handle, format, &formatProperties);
+    return formatProperties;
+  }
+  
+    
+  std::optional<VkImageFormatProperties> DxvkAdapter::imageFormatProperties(
+    VkFormat            format,
+    VkImageType         type,
+    VkImageTiling       tiling,
+    VkImageUsageFlags   usage,
+    VkImageCreateFlags  flags) const {
+    VkImageFormatProperties formatProperties;
+    
+    VkResult status = m_vki->vkGetPhysicalDeviceImageFormatProperties(
+      m_handle, format, type, tiling, usage, flags, &formatProperties);
+    
+    switch (status) {
+      case VK_SUCCESS:                    return formatProperties;
+      case VK_ERROR_FORMAT_NOT_SUPPORTED: return { };
+      
+      default:
+        throw DxvkError("DxvkAdapter::imageFormatProperties: Failed to query format properties");
+    }
+  }
+  
+    
+  uint32_t DxvkAdapter::graphicsQueueFamily() const {
+    for (uint32_t i = 0; i < m_queueFamilies.size(); i++) {
+      if (m_queueFamilies[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
+        return i;
+    }
+    
+    throw DxvkError("DxvkAdapter::graphicsQueueFamily: No graphics queue found");
+  }
+  
+  
+  uint32_t DxvkAdapter::presentQueueFamily() const {
+    // TODO Implement properly
+    return this->graphicsQueueFamily();
+  }
+  
+  
+  Rc<DxvkDevice> DxvkAdapter::createDevice() {
+    auto enabledExtensions = this->enableExtensions();
+    auto enabledFeatures   = this->enableFeatures();
+    
+    float queuePriority = 1.0f;
+    std::vector<VkDeviceQueueCreateInfo> queueInfos;
+    
+    const uint32_t gIndex = this->graphicsQueueFamily();
+    const uint32_t pIndex = this->presentQueueFamily();
+    
+    VkDeviceQueueCreateInfo graphicsQueue;
+    graphicsQueue.sType             = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    graphicsQueue.pNext             = nullptr;
+    graphicsQueue.flags             = 0;
+    graphicsQueue.queueFamilyIndex  = gIndex;
+    graphicsQueue.queueCount        = 1;
+    graphicsQueue.pQueuePriorities  = &queuePriority;
+    queueInfos.push_back(graphicsQueue);
+    
+    if (pIndex != gIndex) {
+      VkDeviceQueueCreateInfo presentQueue = graphicsQueue;
+      presentQueue.queueFamilyIndex        = pIndex;
+      queueInfos.push_back(presentQueue);
+    }
+    
+    VkDeviceCreateInfo info;
+    info.sType                      = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    info.pNext                      = nullptr;
+    info.flags                      = 0;
+    info.queueCreateInfoCount       = queueInfos.size();
+    info.pQueueCreateInfos          = queueInfos.data();
+    info.enabledLayerCount          = 0;
+    info.ppEnabledLayerNames        = nullptr;
+    info.enabledExtensionCount      = enabledExtensions.count();
+    info.ppEnabledExtensionNames    = enabledExtensions.names();
+    info.pEnabledFeatures           = &enabledFeatures;
+    
+    VkDevice device = VK_NULL_HANDLE;
+    
+    if (m_vki->vkCreateDevice(m_handle, &info, nullptr, &device) != VK_SUCCESS)
+      throw DxvkError("DxvkDevice::createDevice: Failed to create device");
+    return new DxvkDevice(this, new vk::DeviceFn(m_vki->instance(), device));
+  }
+  
+  
+  Rc<DxvkSurface> DxvkAdapter::createSurface(HINSTANCE instance, HWND window) {
+    return new DxvkSurface(this, instance, window);
+  }
+  
+  
+  vk::NameList DxvkAdapter::enableExtensions() {
+    std::vector<const char*> extOptional = { };
+    std::vector<const char*> extRequired = {
+      VK_KHR_SWAPCHAIN_EXTENSION_NAME,
+    };
+    
+    const vk::NameSet extensionsAvailable
+      = vk::NameSet::enumerateDeviceExtensions(*m_vki, m_handle);
+    vk::NameList extensionsEnabled;
+    
+    for (auto e : extOptional) {
+      if (extensionsAvailable.supports(e))
+        extensionsEnabled.add(e);
+    }
+    
+    for (auto e : extRequired) {
+      if (!extensionsAvailable.supports(e))
+        throw DxvkError(str::format("DxvkAdapter::enableExtensions: Extension ", e, " not supported"));
+      extensionsEnabled.add(e);
+    }
+    
+    return extensionsEnabled;
+  }
+  
+  
+  VkPhysicalDeviceFeatures DxvkAdapter::enableFeatures() {
+    VkPhysicalDeviceFeatures features;
+    std::memset(&features, 0, sizeof(features));
+    return features;
+  }
+  
+}
\ No newline at end of file
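For readers of this patch, a minimal caller-side sketch of the adapter interface introduced above. The helper name and the hInstance/hWnd parameters are assumptions about the calling code, not part of the patch; only the DxvkAdapter methods used are taken from the sources above.

    #include <iostream>

    #include "dxvk_adapter.h"
    #include "dxvk_device.h"
    #include "dxvk_surface.h"

    // Hypothetical helper: create a logical device and a window surface
    // from an adapter that was enumerated through DxvkInstance.
    dxvk::Rc<dxvk::DxvkDevice> createDeviceForWindow(
      const dxvk::Rc<dxvk::DxvkAdapter>& adapter,
            HINSTANCE                    hInstance,
            HWND                         hWnd) {
      // The adapter exposes device information without a logical device
      VkPhysicalDeviceProperties props = adapter->deviceProperties();
      std::cout << "Using adapter: " << props.deviceName << std::endl;

      // createDevice() enables VK_KHR_swapchain and throws DxvkError on failure
      dxvk::Rc<dxvk::DxvkDevice>  device  = adapter->createDevice();
      dxvk::Rc<dxvk::DxvkSurface> surface = adapter->createSurface(hInstance, hWnd);
      // 'surface' would later be passed to DxvkDevice::createSwapchain()
      return device;
    }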
diff --git a/src/dxvk/dxvk_adapter.h b/src/dxvk/dxvk_adapter.h
new file mode 100644
index 000000000..b7afb0e54
--- /dev/null
+++ b/src/dxvk/dxvk_adapter.h
@@ -0,0 +1,143 @@
+#pragma once
+
+#include <optional>
+
+#include "./vulkan/dxvk_vulkan_extensions.h"
+
+#include "dxvk_include.h"
+
+namespace dxvk {
+  
+  class DxvkDevice;
+  class DxvkInstance;
+  class DxvkSurface;
+  
+  /**
+   * \brief DXVK adapter
+   * 
+   * Corresponds to a physical device in Vulkan. Provides
+   * all kinds of information about the device itself and
+   * the supported feature set.
+   */
+  class DxvkAdapter : public RcObject {
+    
+  public:
+    
+    DxvkAdapter(
+      const Rc<DxvkInstance>&   instance,
+            VkPhysicalDevice    handle);
+    ~DxvkAdapter();
+    
+    /**
+     * \brief Vulkan instance functions
+     * \returns Vulkan instance functions
+     */
+    Rc<vk::InstanceFn> vki() const {
+      return m_vki;
+    }
+    
+    /**
+     * \brief Physical device handle
+     * \returns The adapter handle
+     */
+    VkPhysicalDevice handle() const {
+      return m_handle;
+    }
+    
+    /**
+     * \brief Physical device properties
+     * 
+     * Retrieves information about the device itself.
+     * \returns Physical device properties
+     */
+    VkPhysicalDeviceProperties deviceProperties() const;
+    
+    /**
+     * \brief Memory properties
+     * 
+     * Queries the memory types and memory heaps of
+     * the device. This is useful for memory allocators.
+     * \returns Device memory properties
+     */
+    VkPhysicalDeviceMemoryProperties memoryProperties() const;
+    
+    /**
+     * \brief Supported device features
+     * 
+     * Queries the supported device features.
+     * \returns Device features
+     */
+    VkPhysicalDeviceFeatures features() const;
+    
+    /**
+     * \brief Queries format support
+     * 
+     * \param [in] format The format to query
+     * \returns Format support info
+     */
+    VkFormatProperties formatProperties(
+      VkFormat format) const;
+    
+    /**
+     * \brief Queries image format support
+     * 
+     * \param [in] format Format to query
+     * \param [in] type Image type
+     * \param [in] tiling Image tiling
+     * \param [in] usage Image usage flags
+     * \param [in] flags Image create flags
+     * \returns Image format support info
+     */
+    std::optional<VkImageFormatProperties> imageFormatProperties(
+      VkFormat            format,
+      VkImageType         type,
+      VkImageTiling       tiling,
+      VkImageUsageFlags   usage,
+      VkImageCreateFlags  flags) const;
+    
+    /**
+     * \brief Graphics queue family index
+     * \returns Graphics queue family index
+     */
+    uint32_t graphicsQueueFamily() const;
+    
+    /**
+     * \brief Presentation queue family index
+     * \returns Presentation queue family index
+     */
+    uint32_t presentQueueFamily() const;
+    
+    /**
+     * \brief Creates a DXVK device
+     * 
+     * Creates a logical device for this adapter.
+     * \returns Device handle
+     */
+    Rc<DxvkDevice> createDevice();
+    
+    /**
+     * \brief Creates a surface
+     * 
+     * \param [in] instance Module instance
+     * \param [in] window Application window
+     * \returns Surface handle
+     */
+    Rc<DxvkSurface> createSurface(
+      HINSTANCE instance,
+      HWND      window);
+    
+  private:
+    
+    Rc<DxvkInstance>    m_instance;
+    Rc<vk::InstanceFn>  m_vki;
+    VkPhysicalDevice    m_handle;
+    
+    std::vector<VkQueueFamilyProperties> m_queueFamilies;
+    
+    vk::NameList enableExtensions();
+    
+    VkPhysicalDeviceFeatures enableFeatures();
+    
+  };
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_buffer.cpp b/src/dxvk/dxvk_buffer.cpp
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/dxvk/dxvk_buffer.h b/src/dxvk/dxvk_buffer.h
new file mode 100644
index 000000000..47983f6ac
--- /dev/null
+++ b/src/dxvk/dxvk_buffer.h
@@ -0,0 +1,38 @@
+#pragma once
+
+#include "dxvk_resource.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief Buffer create info
+   * 
+   * The properties of a buffer that are
+   * passed to \ref DxvkDevice::createBuffer
+   */
+  struct DxvkBufferCreateInfo {
+    
+    /// Size of the buffer, in bytes
+    VkDeviceSize bufferSize;
+    
+  };
+  
+  
+  /**
+   * \brief DXVK buffer
+   * 
+   * A simple buffer resource that stores linear data.
+   */
+  class DxvkBuffer : public DxvkResource {
+    
+  public:
+    
+    
+    
+  private:
+    
+    
+    
+  };
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_cmdlist.cpp b/src/dxvk/dxvk_cmdlist.cpp
new file mode 100644
index 000000000..abc564760
--- /dev/null
+++ b/src/dxvk/dxvk_cmdlist.cpp
@@ -0,0 +1,68 @@
+#include "dxvk_cmdlist.h"
+
+namespace dxvk {
+    
+  DxvkCommandList::DxvkCommandList(
+    const Rc<vk::DeviceFn>& vkd,
+          uint32_t          queueFamily)
+  : m_vkd(vkd) {
+    VkCommandPoolCreateInfo poolInfo;
+    poolInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    poolInfo.pNext            = nullptr;
+    poolInfo.flags            = 0;
+    poolInfo.queueFamilyIndex = queueFamily;
+    
+    if (m_vkd->vkCreateCommandPool(m_vkd->device(), &poolInfo, nullptr, &m_pool) != VK_SUCCESS)
+      throw DxvkError("DxvkCommandList::DxvkCommandList: Failed to create command pool");
+    
+    VkCommandBufferAllocateInfo cmdInfo;
+    cmdInfo.sType             = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    cmdInfo.pNext             = nullptr;
+    cmdInfo.commandPool       = m_pool;
+    cmdInfo.level             = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    cmdInfo.commandBufferCount = 1;
+    
+    if (m_vkd->vkAllocateCommandBuffers(m_vkd->device(), &cmdInfo, &m_buffer) != VK_SUCCESS)
+      throw DxvkError("DxvkCommandList::DxvkCommandList: Failed to allocate command buffer");
+  }
+  
+  
+  DxvkCommandList::~DxvkCommandList() {
+    m_resources.reset();
+    
+    m_vkd->vkDestroyCommandPool(
+      m_vkd->device(), m_pool, nullptr);
+  }
+  
+  
+  void DxvkCommandList::beginRecording() {
+    VkCommandBufferBeginInfo info;
+    info.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    info.pNext            = nullptr;
+    info.flags            = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    info.pInheritanceInfo = nullptr;
+    
+    if (m_vkd->vkResetCommandPool(m_vkd->device(), m_pool, 0) != VK_SUCCESS)
+      throw DxvkError("DxvkCommandList::beginRecording: Failed to reset command pool");
+    
+    if (m_vkd->vkBeginCommandBuffer(m_buffer, &info) != VK_SUCCESS)
+      throw DxvkError("DxvkCommandList::beginRecording: Failed to begin command buffer recording");
+  }
+  
+  
+  void DxvkCommandList::endRecording() {
+    if (m_vkd->vkEndCommandBuffer(m_buffer) != VK_SUCCESS)
+      throw DxvkError("DxvkCommandList::endRecording: Failed to record command buffer");
+  }
+  
+  
+  void DxvkCommandList::trackResource(const Rc<DxvkResource>& rc) {
+    m_resources.trackResource(rc);
+  }
+  
+  
+  void DxvkCommandList::reset() {
+    m_resources.reset();
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_cmdlist.h b/src/dxvk/dxvk_cmdlist.h
new file mode 100644
index 000000000..3bc3c0cfe
--- /dev/null
+++ b/src/dxvk/dxvk_cmdlist.h
@@ -0,0 +1,70 @@
+#pragma once
+
+#include <unordered_set>
+
+#include "dxvk_lifetime.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK command list
+   * 
+   * Stores a command buffer that a context can use to record Vulkan
+   * commands. The command list shall also reference the resources
+   * used by the recorded commands for automatic lifetime tracking.
+   * When the command list has completed execution, resources that
+   * are no longer used may get destroyed.
+   */
+  class DxvkCommandList : public RcObject {
+    
+  public:
+    
+    DxvkCommandList(
+      const Rc<vk::DeviceFn>& vkd,
+            uint32_t          queueFamily);
+    ~DxvkCommandList();
+    
+    /**
+     * \brief Command buffer handle
+     * \returns Command buffer handle
+     */
+    VkCommandBuffer handle() const {
+      return m_buffer;
+    }
+    
+    void beginRecording();
+    void endRecording();
+    
+    /**
+     * \brief Adds a resource to track
+     * 
+     * Adds a resource to the internal resource tracker.
+     * Resources will be kept alive and "in use" until
+     * the device can guarantee that the submission has
+     * completed.
+     */
+    void trackResource(
+      const Rc<DxvkResource>& rc);
+    
+    /**
+     * \brief Resets the command list
+     * 
+     * Resets the internal command buffer of the command list and
+     * marks all tracked resources as unused. After the command list
+     * has been submitted to the device, this method is called once
+     * the command list has completed execution.
+     */
+    void reset();
+    
+  private:
+    
+    Rc<vk::DeviceFn>    m_vkd;
+    
+    VkCommandPool       m_pool;
+    VkCommandBuffer     m_buffer;
+    
+    DxvkLifetimeTracker m_resources;
+    
+  };
+  
+}
\ No newline at end of file
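To illustrate the lifetime tracking described in the comments above, a small hedged sketch; everything except the DxvkCommandList interface itself is assumed.

    #include "dxvk_cmdlist.h"

    // Sketch: resources referenced by recorded commands stay alive until
    // the command list is reset after the GPU has finished executing it.
    void recordWithTracking(
      const dxvk::Rc<dxvk::DxvkCommandList>& cmdList,
      const dxvk::Rc<dxvk::DxvkResource>&    resource) {
      cmdList->beginRecording();
      // ... Vulkan commands that read or write 'resource' go here ...
      cmdList->trackResource(resource);  // keep 'resource' alive until reset()
      cmdList->endRecording();

      // Once the submission is known to have completed on the GPU,
      // reset() drops the references so unused resources can be destroyed.
      cmdList->reset();
    }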
diff --git a/src/dxvk/dxvk_context.cpp b/src/dxvk/dxvk_context.cpp
new file mode 100644
index 000000000..ed044b617
--- /dev/null
+++ b/src/dxvk/dxvk_context.cpp
@@ -0,0 +1,73 @@
+#include "dxvk_context.h"
+#include "dxvk_main.h"
+
+namespace dxvk {
+  
+  DxvkContext::DxvkContext(const Rc<vk::DeviceFn>& vkd)
+  : m_vkd(vkd) { }
+  
+  
+  DxvkContext::~DxvkContext() {
+    
+  }
+  
+  
+  void DxvkContext::beginRecording(
+    const Rc<DxvkCommandList>& cmdList) {
+    m_commandList = cmdList;
+    m_commandList->beginRecording();
+  }
+  
+  
+  bool DxvkContext::endRecording() {
+    m_commandList->endRecording();
+    m_commandList = nullptr;
+    return true; // TODO: return false when no commands were recorded
+  }
+    
+  
+  void DxvkContext::setFramebuffer(
+    const Rc<DxvkFramebuffer>& fb) {
+    const DxvkFramebufferSize fbSize = fb->size();
+    // TODO implement properly
+    VkRect2D renderArea;
+    renderArea.offset.x       = 0;
+    renderArea.offset.y       = 0;
+    renderArea.extent.width   = fbSize.width;
+    renderArea.extent.height  = fbSize.height;
+    
+    VkRenderPassBeginInfo info;
+    info.sType            = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+    info.pNext            = nullptr;
+    info.renderPass       = fb->renderPass();
+    info.framebuffer      = fb->handle();
+    info.renderArea       = renderArea;
+    info.clearValueCount  = 0;
+    info.pClearValues     = nullptr;
+    
+    // This is for testing purposes only.
+    VkClearAttachment attachment;
+    attachment.aspectMask       = VK_IMAGE_ASPECT_COLOR_BIT;
+    attachment.colorAttachment  = 0;
+    attachment.clearValue.color.float32[0] = 1.0f;
+    attachment.clearValue.color.float32[1] = 1.0f;
+    attachment.clearValue.color.float32[2] = 1.0f;
+    attachment.clearValue.color.float32[3] = 1.0f;
+    
+    VkClearRect clearRect;
+    clearRect.rect           = renderArea;
+    clearRect.baseArrayLayer = 0;
+    clearRect.layerCount     = fbSize.layers;
+    
+    m_vkd->vkCmdBeginRenderPass(
+      m_commandList->handle(), &info,
+      VK_SUBPASS_CONTENTS_INLINE);
+    m_vkd->vkCmdClearAttachments(
+      m_commandList->handle(),
+      1, &attachment,
+      1, &clearRect);
+    m_vkd->vkCmdEndRenderPass(
+      m_commandList->handle());
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_context.h b/src/dxvk/dxvk_context.h
new file mode 100644
index 000000000..33cc2d7ea
--- /dev/null
+++ b/src/dxvk/dxvk_context.h
@@ -0,0 +1,66 @@
+#pragma once
+
+#include "dxvk_cmdlist.h"
+#include "dxvk_framebuffer.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK context
+   * 
+   * Tracks pipeline state and records command lists.
+   * This is where the actual rendering commands are
+   * recorded.
+   */
+  class DxvkContext : public RcObject {
+    
+  public:
+    
+    DxvkContext(const Rc<vk::DeviceFn>& vkd);
+    ~DxvkContext();
+    
+    /**
+     * \brief Begins command buffer recording
+     * 
+     * Begins recording a command list. This does
+     * not alter any context state other than the
+     * active command list.
+     * \param [in] cmdList Target command list
+     */
+    void beginRecording(
+      const Rc<DxvkCommandList>& cmdList);
+    
+    /**
+     * \brief Ends command buffer recording
+     * 
+     * Finishes recording the active command list.
+     * The command list can then be submitted to
+     * the device.
+     * 
+     * The return value of this method can be used to
+     * determine whether the command list needs to be
+     * submitted. In case the command list is empty,
+     * \c false will be returned and it shall not be
+     * submitted to the device.
+     * 
+     * This will not change any context state
+     * other than the active command list.
+     * \returns \c true if any commands were recorded
+     */
+    bool endRecording();
+    
+    /**
+     * \brief Sets framebuffer
+     * \param [in] fb Framebuffer
+     */
+    void setFramebuffer(
+      const Rc<DxvkFramebuffer>& fb);
+    
+  private:
+    
+    Rc<vk::DeviceFn>    m_vkd;
+    Rc<DxvkCommandList> m_commandList;
+    
+  };
+  
+}
\ No newline at end of file
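Putting the context, command list and device together, a hedged end-to-end sketch of recording and submitting a frame; the device and framebuffer are assumed to have been created elsewhere.

    #include "dxvk_context.h"
    #include "dxvk_device.h"

    // Sketch: record a command list against a framebuffer and submit it.
    // Passing nullptr skips the optional synchronization semaphores.
    void renderFrame(
      const dxvk::Rc<dxvk::DxvkDevice>&      device,
      const dxvk::Rc<dxvk::DxvkFramebuffer>& framebuffer) {
      dxvk::Rc<dxvk::DxvkContext>     context = device->createContext();
      dxvk::Rc<dxvk::DxvkCommandList> cmdList = device->createCommandList();

      context->beginRecording(cmdList);
      context->setFramebuffer(framebuffer);  // currently performs a test clear

      // Only submit if any commands were recorded
      if (context->endRecording()) {
        dxvk::Rc<dxvk::DxvkFence> fence =
          device->submitCommandList(cmdList, nullptr, nullptr);
        // 'fence' would be used to learn when cmdList can be reset
      }
    }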
diff --git a/src/dxvk/dxvk_context_state.cpp b/src/dxvk/dxvk_context_state.cpp
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/dxvk/dxvk_context_state.h b/src/dxvk/dxvk_context_state.h
new file mode 100644
index 000000000..02c506e2a
--- /dev/null
+++ b/src/dxvk/dxvk_context_state.h
@@ -0,0 +1,9 @@
+#pragma once
+
+#include "dxvk_cmdlist.h"
+
+namespace dxvk {
+  
+  
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_device.cpp b/src/dxvk/dxvk_device.cpp
new file mode 100644
index 000000000..9ebbb0db7
--- /dev/null
+++ b/src/dxvk/dxvk_device.cpp
@@ -0,0 +1,119 @@
+#include "dxvk_device.h"
+#include "dxvk_instance.h"
+
+namespace dxvk {
+  
+  DxvkDevice::DxvkDevice(
+    const Rc<DxvkAdapter>&  adapter,
+    const Rc<vk::DeviceFn>& vkd)
+  : m_adapter       (adapter),
+    m_vkd           (vkd),
+    m_memory        (adapter, vkd),
+    m_renderPassPool(vkd) {
+    m_vkd->vkGetDeviceQueue(m_vkd->device(),
+      m_adapter->graphicsQueueFamily(), 0,
+      &m_graphicsQueue);
+    m_vkd->vkGetDeviceQueue(m_vkd->device(),
+      m_adapter->presentQueueFamily(), 0,
+      &m_presentQueue);
+  }
+  
+  
+  DxvkDevice::~DxvkDevice() {
+    m_vkd->vkDeviceWaitIdle(m_vkd->device());
+    m_vkd->vkDestroyDevice(m_vkd->device(), nullptr);
+  }
+  
+  
+  Rc<DxvkCommandList> DxvkDevice::createCommandList() {
+    return new DxvkCommandList(m_vkd,
+      m_adapter->graphicsQueueFamily());
+  }
+  
+  
+  Rc<DxvkContext> DxvkDevice::createContext() {
+    return new DxvkContext(m_vkd);
+  }
+  
+  
+  Rc<DxvkFramebuffer> DxvkDevice::createFramebuffer(
+    const DxvkRenderTargets& renderTargets) {
+    auto format = renderTargets.renderPassFormat();
+    auto renderPass = m_renderPassPool.getRenderPass(format);
+    return new DxvkFramebuffer(m_vkd, renderPass, renderTargets);
+  }
+  
+  
+  Rc<DxvkImage> DxvkDevice::createImage(
+    const DxvkImageCreateInfo&  createInfo,
+          VkMemoryPropertyFlags memoryType) {
+    throw DxvkError("DxvkDevice::createImage: Not implemented");
+  }
+  
+  
+  Rc<DxvkImageView> DxvkDevice::createImageView(
+    const Rc<DxvkImage>&            image,
+    const DxvkImageViewCreateInfo&  createInfo) {
+    return new DxvkImageView(m_vkd, image, createInfo);
+  }
+  
+  
+  Rc<DxvkSemaphore> DxvkDevice::createSemaphore() {
+    return new DxvkSemaphore(m_vkd);
+  }
+  
+  
+  Rc<DxvkSwapchain> DxvkDevice::createSwapchain(
+    const Rc<DxvkSurface>&          surface,
+    const DxvkSwapchainProperties&  properties) {
+    return new DxvkSwapchain(this, surface, properties, m_presentQueue);
+  }
+  
+  
+  Rc<DxvkFence> DxvkDevice::submitCommandList(
+    const Rc<DxvkCommandList>&      commandList,
+    const Rc<DxvkSemaphore>&        waitSync,
+    const Rc<DxvkSemaphore>&        wakeSync) {
+    Rc<DxvkFence> fence = new DxvkFence(m_vkd);
+    
+    VkCommandBuffer commandBuffer = commandList->handle();
+    VkSemaphore     waitSemaphore = VK_NULL_HANDLE;
+    VkSemaphore     wakeSemaphore = VK_NULL_HANDLE;
+    
+    if (waitSync != nullptr) {
+      waitSemaphore = waitSync->handle();
+      commandList->trackResource(waitSync);
+    }
+    
+    if (wakeSync != nullptr) {
+      wakeSemaphore = wakeSync->handle();
+      commandList->trackResource(wakeSync);
+    }
+    
+    const VkPipelineStageFlags waitStageMask
+      = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+    
+    VkSubmitInfo info;
+    info.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    info.pNext                = nullptr;
+    info.waitSemaphoreCount   = waitSemaphore == VK_NULL_HANDLE ? 0 : 1;
+    info.pWaitSemaphores      = &waitSemaphore;
+    info.pWaitDstStageMask    = &waitStageMask;
+    info.commandBufferCount   = commandBuffer == VK_NULL_HANDLE ? 0 : 1;
+    info.pCommandBuffers      = &commandBuffer;
+    info.signalSemaphoreCount = wakeSemaphore == VK_NULL_HANDLE ? 0 : 1;
+    info.pSignalSemaphores    = &wakeSemaphore;
+    
+    if (m_vkd->vkQueueSubmit(m_graphicsQueue, 1, &info, fence->handle()) != VK_SUCCESS)
+      throw DxvkError("DxvkDevice::submitCommandList: Command submission failed");
+    
+    return fence;
+  }
+  
+  
+  void DxvkDevice::waitForIdle() const {
+    if (m_vkd->vkDeviceWaitIdle(m_vkd->device()) != VK_SUCCESS)
+      throw DxvkError("DxvkDevice::waitForIdle: Operation failed");
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_device.h b/src/dxvk/dxvk_device.h
new file mode 100644
index 000000000..5ea2cad9d
--- /dev/null
+++ b/src/dxvk/dxvk_device.h
@@ -0,0 +1,152 @@
+#pragma once
+
+#include "dxvk_adapter.h"
+#include "dxvk_buffer.h"
+#include "dxvk_context.h"
+#include "dxvk_framebuffer.h"
+#include "dxvk_memory.h"
+#include "dxvk_renderpass.h"
+#include "dxvk_swapchain.h"
+#include "dxvk_sync.h"
+
+namespace dxvk {
+  
+  class DxvkInstance;
+  
+  /**
+   * \brief DXVK device
+   * 
+   * Device object. This is responsible for resource creation,
+   * memory allocation, command submission and state tracking.
+   * Rendering commands are recorded into command lists using
+   * contexts. Multiple contexts can be created for a device.
+   */
+  class DxvkDevice : public RcObject {
+    
+  public:
+    
+    DxvkDevice(
+      const Rc<DxvkAdapter>&  adapter,
+      const Rc<vk::DeviceFn>& vkd);
+    ~DxvkDevice();
+    
+    /**
+     * \brief Vulkan device functions
+     * \returns Vulkan device functions
+     */
+    Rc<vk::DeviceFn> vkd() const {
+      return m_vkd;
+    }
+    
+    /**
+     * \brief Logical device handle
+     * \returns The device handle
+     */
+    VkDevice handle() const {
+      return m_vkd->device();
+    }
+    
+    /**
+     * \brief Creates a command list
+     * \returns The command list
+     */
+    Rc<DxvkCommandList> createCommandList();
+    
+    /**
+     * \brief Creates a context
+     * 
+     * Creates a context object that can
+     * be used to record command buffers.
+     * \returns The context object
+     */
+    Rc<DxvkContext> createContext();
+    
+    /**
+     * \brief Creates a framebuffer for a set of render targets
+     * 
+     * Automatically deduces framebuffer dimensions
+     * from the supplied render target views.
+     * \param [in] renderTargets Render targets
+     * \returns The framebuffer object
+     */
+    Rc<DxvkFramebuffer> createFramebuffer(
+      const DxvkRenderTargets& renderTargets);
+    
+    /**
+     * \brief Creates an image object
+     * 
+     * \param [in] createInfo Image create info
+     * \param [in] memoryType Memory type flags
+     * \returns The image object
+     */
+    Rc<DxvkImage> createImage(
+      const DxvkImageCreateInfo&  createInfo,
+            VkMemoryPropertyFlags memoryType);
+    
+    /**
+     * \brief Creates an image view
+     * 
+     * \param [in] image The image to create a view for
+     * \param [in] createInfo Image view create info
+     * \returns The image view
+     */
+    Rc<DxvkImageView> createImageView(
+      const Rc<DxvkImage>&            image,
+      const DxvkImageViewCreateInfo&  createInfo);
+    
+    /**
+     * \brief Creates a semaphore object
+     * \returns Newly created semaphore
+     */
+    Rc<DxvkSemaphore> createSemaphore();
+    
+    /**
+     * \brief Creates a swap chain
+     * 
+     * \param [in] surface The target surface
+     * \param [in] properties Swapchain properties
+     * \returns The swapchain object
+     */
+    Rc<DxvkSwapchain> createSwapchain(
+      const Rc<DxvkSurface>&          surface,
+      const DxvkSwapchainProperties&  properties);
+    
+    /**
+     * \brief Submits a command list
+     * 
+     * Synchronization arguments are optional. 
+     * \param [in] commandList The command list to submit
+     * \param [in] waitSync (Optional) Semaphore to wait on
+     * \param [in] wakeSync (Optional) Semaphore to notify
+     * \returns Synchronization fence
+     */
+    Rc<DxvkFence> submitCommandList(
+      const Rc<DxvkCommandList>&      commandList,
+      const Rc<DxvkSemaphore>&        waitSync,
+      const Rc<DxvkSemaphore>&        wakeSync);
+    
+    /**
+     * \brief Waits until the device becomes idle
+     * 
+     * Waits for the GPU to complete the execution of all
+     * previously submitted command buffers. This may be
+     * used to ensure that resources that were previously
+     * used by the GPU can be safely destroyed.
+     */
+    void waitForIdle() const;
+    
+  private:
+    
+    Rc<DxvkAdapter>   m_adapter;
+    Rc<vk::DeviceFn>  m_vkd;
+    
+    DxvkMemoryAllocator m_memory;
+    DxvkRenderPassPool  m_renderPassPool;
+    
+    VkQueue m_graphicsQueue;
+    VkQueue m_presentQueue;
+    
+    
+  };
+  
+}
\ No newline at end of file
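As a hedged sketch of the optional synchronization arguments of submitCommandList, assuming a presentation-style flow; the surrounding swap chain logic lives in other files of this patch and is not shown here.

    #include "dxvk_device.h"

    // Sketch: wait on an 'acquire' semaphore before execution and signal a
    // 'present' semaphore afterwards, as a swap chain presenter would need.
    void submitWithSync(
      const dxvk::Rc<dxvk::DxvkDevice>&      device,
      const dxvk::Rc<dxvk::DxvkCommandList>& cmdList) {
      dxvk::Rc<dxvk::DxvkSemaphore> acquireSync = device->createSemaphore();
      dxvk::Rc<dxvk::DxvkSemaphore> presentSync = device->createSemaphore();

      // 'fence' signals on the CPU side when the submission has finished
      dxvk::Rc<dxvk::DxvkFence> fence =
        device->submitCommandList(cmdList, acquireSync, presentSync);

      // At teardown, wait until the GPU is idle before destroying resources
      device->waitForIdle();
    }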
diff --git a/src/dxvk/dxvk_framebuffer.cpp b/src/dxvk/dxvk_framebuffer.cpp
new file mode 100644
index 000000000..5b1fad274
--- /dev/null
+++ b/src/dxvk/dxvk_framebuffer.cpp
@@ -0,0 +1,95 @@
+#include "dxvk_framebuffer.h"
+
+namespace dxvk {
+  
+  DxvkRenderTargets:: DxvkRenderTargets() { }
+  DxvkRenderTargets::~DxvkRenderTargets() { }
+  
+  
+  DxvkRenderPassFormat DxvkRenderTargets::renderPassFormat() const {
+    DxvkRenderPassFormat result;
+    
+    for (uint32_t i = 0; i < MaxNumColorTargets; i++) {
+      if (m_colorTargets.at(i) != nullptr) {
+        result.setColorFormat(i, m_colorTargets.at(i)->info().format);
+        result.setSampleCount(m_colorTargets.at(i)->imageInfo().sampleCount);
+      }
+    }
+    
+    if (m_depthTarget != nullptr) {
+      result.setDepthFormat(m_depthTarget->info().format);
+      result.setSampleCount(m_depthTarget->imageInfo().sampleCount);
+    }
+    
+    return result;
+  }
+  
+  
+  std::vector<VkImageView> DxvkRenderTargets::getAttachments() const {
+    std::vector<VkImageView> result;
+    
+    if (m_depthTarget != nullptr)
+      result.push_back(m_depthTarget->handle());
+    
+    for (uint32_t i = 0; i < MaxNumColorTargets; i++) {
+      if (m_colorTargets.at(i) != nullptr)
+        result.push_back(m_colorTargets.at(i)->handle());
+    }
+    
+    return result;
+  }
+  
+  
+  DxvkFramebufferSize DxvkRenderTargets::getImageSize() const {
+    if (m_depthTarget != nullptr)
+      return this->renderTargetSize(m_depthTarget);
+    
+    for (uint32_t i = 0; i < MaxNumColorTargets; i++) {
+      if (m_colorTargets.at(i) != nullptr)
+        return this->renderTargetSize(m_colorTargets.at(i));
+    }
+    
+    return DxvkFramebufferSize { 0, 0, 0 };
+  }
+  
+  
+  DxvkFramebufferSize DxvkRenderTargets::renderTargetSize(
+    const Rc<DxvkImageView>& renderTarget) const {
+    auto extent = renderTarget->imageInfo().extent;
+    auto layers = renderTarget->info().numLayers;
+    return DxvkFramebufferSize { extent.width, extent.height, layers };
+  }
+  
+  
+  DxvkFramebuffer::DxvkFramebuffer(
+    const Rc<vk::DeviceFn>&       vkd,
+    const Rc<DxvkRenderPass>&     renderPass,
+    const DxvkRenderTargets&      renderTargets)
+  : m_vkd             (vkd),
+    m_renderPass      (renderPass),
+    m_renderTargets   (renderTargets),
+    m_framebufferSize (renderTargets.getImageSize()) {
+    auto views = renderTargets.getAttachments();
+    
+    VkFramebufferCreateInfo info;
+    info.sType                = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    info.pNext                = nullptr;
+    info.flags                = 0;
+    info.renderPass           = renderPass->handle();
+    info.attachmentCount      = views.size();
+    info.pAttachments         = views.data();
+    info.width                = m_framebufferSize.width;
+    info.height               = m_framebufferSize.height;
+    info.layers               = m_framebufferSize.layers;
+    
+    if (m_vkd->vkCreateFramebuffer(m_vkd->device(), &info, nullptr, &m_framebuffer) != VK_SUCCESS)
+      throw DxvkError("DxvkFramebuffer::DxvkFramebuffer: Failed to create framebuffer object");
+  }
+  
+  
+  DxvkFramebuffer::~DxvkFramebuffer() {
+    m_vkd->vkDestroyFramebuffer(
+      m_vkd->device(), m_framebuffer, nullptr);
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_framebuffer.h b/src/dxvk/dxvk_framebuffer.h
new file mode 100644
index 000000000..4a032f263
--- /dev/null
+++ b/src/dxvk/dxvk_framebuffer.h
@@ -0,0 +1,160 @@
+#pragma once
+
+#include "dxvk_image.h"
+#include "dxvk_renderpass.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief Framebuffer size
+   * 
+   * Stores the width, height and number of layers
+   * of a framebuffer. This can be used in case a
+   * framebuffer does not have any attachments.
+   */
+  struct DxvkFramebufferSize {
+    uint32_t width;
+    uint32_t height;
+    uint32_t layers;
+  };
+  
+  
+  /**
+   * \brief Render target description
+   * 
+   * Stores render targets for a framebuffer object
+   * and provides methods to query the render pass
+   * format. Note that all render target views must
+   * have the same size and number of array layers.
+   */
+  class DxvkRenderTargets {
+    
+  public:
+    
+    DxvkRenderTargets();
+    ~DxvkRenderTargets();
+    
+    /**
+     * \brief Retrieves color target
+     * 
+     * \param [in] id Color attachment ID
+     * \returns Render target view
+     */
+    Rc<DxvkImageView> getColorTarget(uint32_t id) const {
+      return m_colorTargets.at(id);
+    }
+    
+    /**
+     * \brief Retrieves depth-stencil target
+     * \returns Depth-stencil target view
+     */
+    Rc<DxvkImageView> getDepthTarget() const {
+      return m_depthTarget;
+    }
+    
+    /**
+     * \brief Sets color target
+     * 
+     * \param [in] id Color attachment ID
+     * \param [in] view Render target view
+     */
+    void setColorTarget(uint32_t id, const Rc<DxvkImageView>& view) {
+      m_colorTargets.at(id) = view;
+    }
+    
+    /**
+     * \brief Sets depth-stencil target
+     * \param [in] view Depth-stencil target view
+     */
+    void setDepthTarget(const Rc<DxvkImageView>& view) {
+      m_depthTarget = view;
+    }
+    
+    /**
+     * \brief Render pass format
+     * 
+     * Computes the render pass format based on
+     * the color and depth-stencil attachments.
+     * \returns Render pass format
+     */
+    DxvkRenderPassFormat renderPassFormat() const;
+    
+    /**
+     * \brief Creates attachment list
+     * \returns Framebuffer attachment list
+     */
+    std::vector<VkImageView> getAttachments() const;
+    
+    /**
+     * \brief Framebuffer size
+     * 
+     * The width, height and layers
+     * of the attached render targets.
+     * \returns Framebuffer size
+     */
+    DxvkFramebufferSize getImageSize() const;
+    
+  private:
+    
+    std::array<Rc<DxvkImageView>, MaxNumColorTargets> m_colorTargets;
+    Rc<DxvkImageView>                                 m_depthTarget;
+    
+    DxvkFramebufferSize renderTargetSize(
+      const Rc<DxvkImageView>& renderTarget) const;
+    
+  };
+  
+  
+  /**
+   * \brief DXVK framebuffer
+   * 
+   * A framebuffer either stores a set of image views
+   * that will be used as render targets, or, in case
+   * no render targets are used, a set of fixed
+   * viewport dimensions.
+   */
+  class DxvkFramebuffer : public DxvkResource {
+    
+  public:
+    
+    DxvkFramebuffer(
+      const Rc<vk::DeviceFn>&       vkd,
+      const Rc<DxvkRenderPass>&     renderPass,
+      const DxvkRenderTargets&      renderTargets);
+    ~DxvkFramebuffer();
+    
+    /**
+     * \brief Framebuffer handle
+     * \returns Framebuffer handle
+     */
+    VkFramebuffer handle() const {
+      return m_framebuffer;
+    }
+    
+    /**
+     * \brief Render pass handle
+     * \returns Render pass handle
+     */
+    VkRenderPass renderPass() const {
+      return m_renderPass->handle();
+    }
+    
+    /**
+     * \brief Framebuffer size
+     * \returns Framebuffer size
+     */
+    DxvkFramebufferSize size() const {
+      return m_framebufferSize;
+    }
+    
+  private:
+    
+    Rc<vk::DeviceFn>    m_vkd;
+    Rc<DxvkRenderPass>  m_renderPass;
+    DxvkRenderTargets   m_renderTargets;
+    DxvkFramebufferSize m_framebufferSize;
+    VkFramebuffer       m_framebuffer;
+    
+  };
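+  
+  // Illustrative usage sketch (not part of the original patch): how the
+  // classes above are expected to fit together. Assumes a device function
+  // table 'vkd', a DxvkRenderPassPool 'renderPassPool' and a color image
+  // view 'colorView' already exist.
+  //
+  //   DxvkRenderTargets renderTargets;
+  //   renderTargets.setColorTarget(0, colorView);
+  //   
+  //   Rc<DxvkRenderPass> renderPass
+  //     = renderPassPool->getRenderPass(renderTargets.renderPassFormat());
+  //   
+  //   Rc<DxvkFramebuffer> framebuffer
+  //     = new DxvkFramebuffer(vkd, renderPass, renderTargets);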
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_hash.h b/src/dxvk/dxvk_hash.h
new file mode 100644
index 000000000..0ee55dcb7
--- /dev/null
+++ b/src/dxvk/dxvk_hash.h
@@ -0,0 +1,34 @@
+#pragma once
+
+#include <cstddef>
+
+namespace dxvk {
+
+  struct DxvkHash {
+    template<typename T>
+    size_t operator () (const T& object) const {
+      return object.hash();
+    }
+  };
+  
+  class DxvkHashState {
+    
+  public:
+    
+    void add(size_t hash) {
+      m_value ^= hash + 0x9e3779b9
+               + (m_value << 6)
+               + (m_value >> 2);
+    }
+    
+    operator size_t () const {
+      return m_value;
+    }
+    
+  private:
+    
+    size_t m_value = 0;
+    
+  };
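+  
+  // Illustrative usage sketch (not part of the original patch): combining
+  // the hashes of several values into one, e.g. when implementing a hash()
+  // method for a struct with members 'width' and 'height'.
+  //
+  //   DxvkHashState state;
+  //   state.add(std::hash<uint32_t>()(width));
+  //   state.add(std::hash<uint32_t>()(height));
+  //   size_t combined = state;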
+
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_image.cpp b/src/dxvk/dxvk_image.cpp
new file mode 100644
index 000000000..e66e6c88e
--- /dev/null
+++ b/src/dxvk/dxvk_image.cpp
@@ -0,0 +1,66 @@
+#include "dxvk_image.h"
+
+namespace dxvk {
+  
+//   DxvkImage::DxvkImage(
+//     const Rc<vk::DeviceFn>&     vkd,
+//     const DxvkImageCreateInfo&  info,
+//           DxvkMemory&&          memory)
+//   : m_vkd(vkd), m_info(info), m_memory(std::move(memory)) {
+//     
+//   }
+  
+  
+  DxvkImage::DxvkImage(
+    const Rc<vk::DeviceFn>&     vkd,
+    const DxvkImageCreateInfo&  info,
+          VkImage               image)
+  : m_vkd(vkd), m_info(info), m_image(image) { }
+  
+  
+  DxvkImage::~DxvkImage() {
+    // This is a bit of a hack to determine whether
+    // the image is implementation-handled or not
+    if (m_memory.memory() != VK_NULL_HANDLE)
+      m_vkd->vkDestroyImage(m_vkd->device(), m_image, nullptr);
+  }
+  
+  
+  DxvkImageView::DxvkImageView(
+    const Rc<vk::DeviceFn>&         vkd,
+    const Rc<DxvkImage>&            image,
+    const DxvkImageViewCreateInfo&  info)
+  : m_vkd(vkd), m_image(image), m_info(info) {
+    VkComponentMapping componentMapping;
+    componentMapping.r = VK_COMPONENT_SWIZZLE_IDENTITY;
+    componentMapping.g = VK_COMPONENT_SWIZZLE_IDENTITY;
+    componentMapping.b = VK_COMPONENT_SWIZZLE_IDENTITY;
+    componentMapping.a = VK_COMPONENT_SWIZZLE_IDENTITY;
+    
+    VkImageSubresourceRange subresourceRange;
+    subresourceRange.aspectMask     = info.aspect;
+    subresourceRange.baseMipLevel   = info.minLevel;
+    subresourceRange.levelCount     = info.numLevels;
+    subresourceRange.baseArrayLayer = info.minLayer;
+    subresourceRange.layerCount     = info.numLayers;
+    
+    VkImageViewCreateInfo viewInfo;
+    viewInfo.sType            = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    viewInfo.pNext            = nullptr;
+    viewInfo.flags            = 0;
+    viewInfo.image            = image->handle();
+    viewInfo.viewType         = info.type;
+    viewInfo.format           = info.format;
+    viewInfo.components       = componentMapping;
+    viewInfo.subresourceRange = subresourceRange;
+    
+    if (m_vkd->vkCreateImageView(m_vkd->device(), &viewInfo, nullptr, &m_view) != VK_SUCCESS)
+      throw DxvkError("DxvkImageView::DxvkImageView: Failed to create image view");
+  }
+  
+  
+  DxvkImageView::~DxvkImageView() {
+    m_vkd->vkDestroyImageView(m_vkd->device(), m_view, nullptr);
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_image.h b/src/dxvk/dxvk_image.h
new file mode 100644
index 000000000..5e367731d
--- /dev/null
+++ b/src/dxvk/dxvk_image.h
@@ -0,0 +1,151 @@
+#pragma once
+
+#include "dxvk_memory.h"
+#include "dxvk_resource.h"
+
+namespace dxvk {
+  
+  struct DxvkImageCreateInfo {
+    VkImageType type;
+    VkFormat format;
+    VkSampleCountFlagBits sampleCount;
+    VkExtent3D extent;
+    uint32_t numLayers;
+    uint32_t mipLevels;
+    VkImageUsageFlags usage;
+    VkImageTiling tiling;
+  };
+  
+  struct DxvkImageViewCreateInfo {
+    VkImageViewType type;
+    VkFormat format;
+    VkImageAspectFlags aspect;
+    uint32_t minLevel;
+    uint32_t numLevels;
+    uint32_t minLayer;
+    uint32_t numLayers;
+  };
+  
+  
+  /**
+   * \brief DXVK image
+   */
+  class DxvkImage : public DxvkResource {
+    
+  public:
+    
+    /**
+     * \brief Creates a new image
+     * 
+     * \param [in] vkd Vulkan device functions
+     * \param [in] info Image properties
+     * \param [in] memory Image memory
+     */
+//     DxvkImage(
+//       const Rc<vk::DeviceFn>&     vkd,
+//       const DxvkImageCreateInfo&  info,
+//             DxvkMemory&&          memory);
+    
+    /**
+     * \brief Creates image object from existing image
+     * 
+     * This can be used to create an image object for
+     * an implementation-managed image. Make sure to
+     * provide the correct image properties, since
+     * otherwise some image operations may fail.
+     */
+    DxvkImage(
+      const Rc<vk::DeviceFn>&     vkd,
+      const DxvkImageCreateInfo&  info,
+            VkImage               image);
+    
+    /**
+     * \brief Destroys image
+     * 
+     * If this is an implementation-managed image,
+     * this will not destroy the Vulkan image.
+     */
+    ~DxvkImage();
+    
+    /**
+     * \brief Image handle
+     * 
+     * Internal use only.
+     * \returns Image handle
+     */
+    VkImage handle() const {
+      return m_image;
+    }
+    
+    /**
+     * \brief Image properties
+     * 
+     * The image create info structure.
+     * \returns Image properties
+     */
+    const DxvkImageCreateInfo& info() const {
+      return m_info;
+    }
+    
+  private:
+    
+    Rc<vk::DeviceFn>    m_vkd;
+    DxvkImageCreateInfo m_info;
+    DxvkMemory          m_memory;
+    VkImage             m_image;
+    
+  };
+  
+  
+  /**
+   * \brief DXVK image view
+   */
+  class DxvkImageView : public DxvkResource {
+    
+  public:
+    
+    DxvkImageView(
+      const Rc<vk::DeviceFn>&         vkd,
+      const Rc<DxvkImage>&            image,
+      const DxvkImageViewCreateInfo&  info);
+    
+    ~DxvkImageView();
+    
+    /**
+     * \brief Image view handle
+     * 
+     * Internal use only.
+     * \returns Image view handle
+     */
+    VkImageView handle() const {
+      return m_view;
+    }
+    
+    /**
+     * \brief Image view properties
+     * \returns Image view properties
+     */
+    const DxvkImageViewCreateInfo& info() const {
+      return m_info;
+    }
+    
+    /**
+     * \brief Image properties
+     * \returns Image properties
+     */
+    const DxvkImageCreateInfo& imageInfo() const {
+      return m_image->info();
+    }
+    
+  private:
+    
+    Rc<vk::DeviceFn>  m_vkd;
+    Rc<DxvkImage>     m_image;
+    
+    DxvkImageViewCreateInfo m_info;
+    VkImageView             m_view;
+    
+    
+  };
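+  
+  // Illustrative usage sketch (not part of the original patch): creating a
+  // 2D color view for an existing image 'image'. Assumes 'vkd' is the same
+  // device function table that was used to create the image.
+  //
+  //   DxvkImageViewCreateInfo viewInfo;
+  //   viewInfo.type      = VK_IMAGE_VIEW_TYPE_2D;
+  //   viewInfo.format    = image->info().format;
+  //   viewInfo.aspect    = VK_IMAGE_ASPECT_COLOR_BIT;
+  //   viewInfo.minLevel  = 0;
+  //   viewInfo.numLevels = 1;
+  //   viewInfo.minLayer  = 0;
+  //   viewInfo.numLayers = 1;
+  //   
+  //   Rc<DxvkImageView> view = new DxvkImageView(vkd, image, viewInfo);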
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_include.h b/src/dxvk/dxvk_include.h
new file mode 100644
index 000000000..b89bcfdb8
--- /dev/null
+++ b/src/dxvk/dxvk_include.h
@@ -0,0 +1,10 @@
+#pragma once
+
+#include "../util/util_error.h"
+#include "../util/util_string.h"
+
+#include "../util/rc/util_rc.h"
+#include "../util/rc/util_rc_ptr.h"
+
+#include "./vulkan/dxvk_vulkan_extensions.h"
+#include "./vulkan/dxvk_vulkan_loader.h"
diff --git a/src/dxvk/dxvk_instance.cpp b/src/dxvk/dxvk_instance.cpp
new file mode 100644
index 000000000..d1452b55d
--- /dev/null
+++ b/src/dxvk/dxvk_instance.cpp
@@ -0,0 +1,108 @@
+#include "dxvk_instance.h"
+#include "dxvk_main.h"
+
+namespace dxvk {
+  
+  DxvkInstance::DxvkInstance()
+  : m_vkl(new vk::LibraryFn()),
+    m_vki(new vk::InstanceFn(this->createInstance())) {
+    
+  }
+  
+  
+  DxvkInstance::~DxvkInstance() {
+    m_vki->vkDestroyInstance(
+      m_vki->instance(), nullptr);
+  }
+  
+  
+  std::vector<Rc<DxvkAdapter>> DxvkInstance::enumAdapters() {
+    uint32_t numAdapters = 0;
+    if (m_vki->vkEnumeratePhysicalDevices(m_vki->instance(), &numAdapters, nullptr) != VK_SUCCESS)
+      throw DxvkError("DxvkInstance::enumAdapters: Failed to enumerate adapters");
+    
+    std::vector<VkPhysicalDevice> adapters(numAdapters);
+    if (m_vki->vkEnumeratePhysicalDevices(m_vki->instance(), &numAdapters, adapters.data()) != VK_SUCCESS)
+      throw DxvkError("DxvkInstance::enumAdapters: Failed to enumerate adapters");
+    
+    std::vector<Rc<DxvkAdapter>> result;
+    for (uint32_t i = 0; i < numAdapters; i++)
+      result.push_back(new DxvkAdapter(this, adapters[i]));
+    return result;
+  }
+  
+  
+  VkInstance DxvkInstance::createInstance() {
+    auto enabledLayers     = this->getLayers();
+    auto enabledExtensions = this->getExtensions(enabledLayers);
+    
+    VkApplicationInfo appInfo;
+    appInfo.sType                 = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+    appInfo.pNext                 = nullptr;
+    appInfo.pApplicationName      = nullptr;
+    appInfo.applicationVersion    = 0;
+    appInfo.pEngineName           = "DXVK";
+    appInfo.engineVersion         = VK_MAKE_VERSION(0, 0, 1);
+    appInfo.apiVersion            = VK_MAKE_VERSION(1, 0, 47);
+    
+    VkInstanceCreateInfo info;
+    info.sType                    = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    info.pNext                    = nullptr;
+    info.flags                    = 0;
+    info.pApplicationInfo         = &appInfo;
+    info.enabledLayerCount        = enabledLayers.count();
+    info.ppEnabledLayerNames      = enabledLayers.names();
+    info.enabledExtensionCount    = enabledExtensions.count();
+    info.ppEnabledExtensionNames  = enabledExtensions.names();
+    
+    VkInstance result = VK_NULL_HANDLE;
+    if (m_vkl->vkCreateInstance(&info, nullptr, &result) != VK_SUCCESS)
+      throw DxvkError("DxvkInstance::createInstance: Failed to create Vulkan instance");
+    return result;
+  }
+  
+  
+  vk::NameList DxvkInstance::getLayers() {
+    std::vector<const char*> layers = {
+      "VK_LAYER_LUNARG_standard_validation"
+    };
+    
+    const vk::NameSet layersAvailable
+      = vk::NameSet::enumerateInstanceLayers(*m_vkl);
+    
+    vk::NameList layersEnabled;
+    for (auto l : layers) {
+      if (layersAvailable.supports(l))
+        layersEnabled.add(l);
+    }
+    
+    return layersEnabled;
+  }
+  
+  
+  vk::NameList DxvkInstance::getExtensions(const vk::NameList& layers) {
+    std::vector<const char*> extOptional = { };
+    std::vector<const char*> extRequired = {
+      VK_KHR_SURFACE_EXTENSION_NAME,
+      VK_KHR_WIN32_SURFACE_EXTENSION_NAME,
+    };
+    
+    const vk::NameSet extensionsAvailable
+      = vk::NameSet::enumerateInstanceExtensions(*m_vkl, layers);
+    vk::NameList extensionsEnabled;
+    
+    for (auto e : extOptional) {
+      if (extensionsAvailable.supports(e))
+        extensionsEnabled.add(e);
+    }
+    
+    for (auto e : extRequired) {
+      if (!extensionsAvailable.supports(e))
+        throw DxvkError(str::format("DxvkInstance::getExtensions: Extension ", e, " not supported"));
+      extensionsEnabled.add(e);
+    }
+    
+    return extensionsEnabled;
+  }
+  
+}
diff --git a/src/dxvk/dxvk_instance.h b/src/dxvk/dxvk_instance.h
new file mode 100644
index 000000000..ffe2766fa
--- /dev/null
+++ b/src/dxvk/dxvk_instance.h
@@ -0,0 +1,56 @@
+#pragma once
+
+#include "dxvk_adapter.h"
+#include "dxvk_device.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK instance
+   * 
+   * Manages a Vulkan instance and stores a list
+   * of adapters. This also provides methods for
+   * device creation.
+   */
+  class DxvkInstance : public RcObject {
+    
+  public:
+    
+    DxvkInstance();
+    ~DxvkInstance();
+    
+    /**
+     * \brief Vulkan instance functions
+     * \returns Vulkan instance functions
+     */
+    Rc<vk::InstanceFn> vki() const {
+      return m_vki;
+    }
+    
+    /**
+     * \brief Vulkan instance handle
+     * \returns The instance handle
+     */
+    VkInstance handle() {
+      return m_vki->instance();
+    }
+    
+    /**
+     * \brief Retrieves a list of adapters
+     * \returns List of adapter objects
+     */
+    std::vector<Rc<DxvkAdapter>> enumAdapters();
+    
+  private:
+    
+    Rc<vk::LibraryFn>   m_vkl;
+    Rc<vk::InstanceFn>  m_vki;
+    
+    VkInstance createInstance();
+    
+    vk::NameList getLayers();
+    vk::NameList getExtensions(const vk::NameList& layers);
+    
+  };
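+  
+  // Illustrative usage sketch (not part of the original patch): creating an
+  // instance and picking the first available adapter.
+  //
+  //   Rc<DxvkInstance> instance = new DxvkInstance();
+  //   std::vector<Rc<DxvkAdapter>> adapters = instance->enumAdapters();
+  //   
+  //   if (adapters.size() == 0)
+  //     throw DxvkError("No Vulkan-capable adapter found");
+  //   Rc<DxvkAdapter> adapter = adapters.at(0);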
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_lifetime.cpp b/src/dxvk/dxvk_lifetime.cpp
new file mode 100644
index 000000000..648a846f8
--- /dev/null
+++ b/src/dxvk/dxvk_lifetime.cpp
@@ -0,0 +1,21 @@
+#include "dxvk_lifetime.h"
+
+namespace dxvk {
+  
+  DxvkLifetimeTracker::DxvkLifetimeTracker() { }
+  DxvkLifetimeTracker::~DxvkLifetimeTracker() { }
+  
+  
+  void DxvkLifetimeTracker::trackResource(const Rc<DxvkResource>& rc) {
+    if (m_resources.insert(rc).second)
+      rc->incUseCount();
+  }
+  
+  
+  void DxvkLifetimeTracker::reset() {
+    for (auto i = m_resources.cbegin(); i != m_resources.cend(); i++)
+      (*i)->decUseCount();
+    m_resources.clear();
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_lifetime.h b/src/dxvk/dxvk_lifetime.h
new file mode 100644
index 000000000..ebdf2ef86
--- /dev/null
+++ b/src/dxvk/dxvk_lifetime.h
@@ -0,0 +1,45 @@
+#pragma once
+
+#include <unordered_set>
+
+#include "dxvk_resource.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK lifetime tracker
+   * 
+   * Maintains references to a set of resources. This is
+   * used to guarantee that resources are not destroyed
+   * or otherwise accessed in an unsafe manner until the
+   * device has finished using them.
+   */
+  class DxvkLifetimeTracker {
+    
+  public:
+    
+    DxvkLifetimeTracker();
+    ~DxvkLifetimeTracker();
+    
+    /**
+     * \brief Adds a resource to track
+     * \param [in] rc The resource to track
+     */
+    void trackResource(
+      const Rc<DxvkResource>& rc);
+    
+    /**
+     * \brief Releases all tracked resources
+     * 
+     * Called automatically by the device once the
+     * command list that used the tracked resources
+     * has completed execution.
+     */
+    void reset();
+    
+  private:
+    
+    std::unordered_set<Rc<DxvkResource>, RcHash<DxvkResource>> m_resources;
+    
+  };
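+  
+  // Illustrative usage sketch (not part of the original patch): keeping a
+  // resource alive while a command list that uses it runs on the GPU.
+  //
+  //   DxvkLifetimeTracker tracker;
+  //   tracker.trackResource(image);   // adds a reference, marks it in use
+  //   // ... submit the command list and wait for it to complete ...
+  //   tracker.reset();                // releases the tracked references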
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_main.cpp b/src/dxvk/dxvk_main.cpp
new file mode 100644
index 000000000..0dbe3e0b1
--- /dev/null
+++ b/src/dxvk/dxvk_main.cpp
@@ -0,0 +1,11 @@
+#include "dxvk_main.h"
+
+namespace dxvk {
+  
+  Log g_logger("dxvk.log");
+  
+  void log(const std::string& message) {
+    g_logger.log(message);
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_main.h b/src/dxvk/dxvk_main.h
new file mode 100644
index 000000000..6c2719946
--- /dev/null
+++ b/src/dxvk/dxvk_main.h
@@ -0,0 +1,14 @@
+#pragma once
+
+#include "../util/log/log.h"
+
+
+namespace dxvk {
+  
+  /**
+   * \brief Adds a message to the global DXVK log
+   * \param [in] message Log message
+   */
+  void log(const std::string& message);
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_memory.cpp b/src/dxvk/dxvk_memory.cpp
new file mode 100644
index 000000000..2ce0740cd
--- /dev/null
+++ b/src/dxvk/dxvk_memory.cpp
@@ -0,0 +1,112 @@
+#include "dxvk_memory.h"
+
+namespace dxvk {
+  
+  DxvkMemory::DxvkMemory() {
+    
+  }
+  
+  
+  DxvkMemory::DxvkMemory(
+    DxvkMemoryAllocator*  alloc,
+    VkDeviceMemory        memory,
+    void*                 mapPtr)
+  : m_alloc (alloc),
+    m_memory(memory),
+    m_mapPtr(mapPtr) { }
+  
+  
+  DxvkMemory::DxvkMemory(DxvkMemory&& other)
+  : m_alloc (other.m_alloc),
+    m_memory(other.m_memory),
+    m_mapPtr(other.m_mapPtr) {
+    other.m_alloc  = nullptr;
+    other.m_memory = VK_NULL_HANDLE;
+    other.m_mapPtr = nullptr;
+  }
+  
+  
+  DxvkMemory& DxvkMemory::operator = (DxvkMemory&& other) {
+    this->m_alloc  = other.m_alloc;
+    this->m_memory = other.m_memory;
+    this->m_mapPtr = other.m_mapPtr;
+    other.m_alloc  = nullptr;
+    other.m_memory = VK_NULL_HANDLE;
+    other.m_mapPtr = nullptr;
+    return *this;
+  }
+  
+  
+  DxvkMemory::~DxvkMemory() {
+    if (m_memory != VK_NULL_HANDLE)
+      m_alloc->freeMemory(m_memory);
+  }
+  
+  
+  DxvkMemoryAllocator::DxvkMemoryAllocator(
+    const Rc<DxvkAdapter>&  adapter,
+    const Rc<vk::DeviceFn>& vkd)
+  : m_vkd(vkd), m_memProps(adapter->memoryProperties()) {
+    
+  }
+  
+  
+  DxvkMemoryAllocator::~DxvkMemoryAllocator() {
+    
+  }
+  
+  
+  DxvkMemory DxvkMemoryAllocator::alloc(
+    const VkMemoryRequirements& req,
+    const VkMemoryPropertyFlags flags) {
+    
+    VkDeviceMemory memory = VK_NULL_HANDLE;
+    void*          mapPtr = nullptr;
+    
+    for (uint32_t i = 0; i < m_memProps.memoryTypeCount; i++) {
+      const bool supported = (req.memoryTypeBits & (1u << i)) != 0;
+      const bool adequate  = (m_memProps.memoryTypes[i].propertyFlags & flags) == flags;
+      
+      if (supported && adequate) {
+        memory = this->allocMemory(req.size, i);
+        
+        if (memory != VK_NULL_HANDLE)
+          break;
+      }
+    }
+    
+    if (memory == VK_NULL_HANDLE)
+      throw DxvkError("DxvkMemoryAllocator::alloc: Failed to allocate memory");
+    
+    if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
+      if (m_vkd->vkMapMemory(m_vkd->device(), memory,
+          0, VK_WHOLE_SIZE, 0, &mapPtr) != VK_SUCCESS)
+        throw DxvkError("DxvkMemoryAllocator::alloc: Failed to map memory");
+    }
+    
+    return DxvkMemory(this, memory, mapPtr);
+  }
+  
+  
+  VkDeviceMemory DxvkMemoryAllocator::allocMemory(
+    VkDeviceSize blockSize, uint32_t memoryType) {
+    
+    VkMemoryAllocateInfo info;
+    info.sType            = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    info.pNext            = nullptr;
+    info.allocationSize   = blockSize;
+    info.memoryTypeIndex  = memoryType;
+    
+    VkDeviceMemory memory = VK_NULL_HANDLE;
+    if (m_vkd->vkAllocateMemory(m_vkd->device(),
+        &info, nullptr, &memory) != VK_SUCCESS)
+      return VK_NULL_HANDLE;
+    return memory;
+  }
+  
+  
+  void DxvkMemoryAllocator::freeMemory(VkDeviceMemory memory) {
+    m_vkd->vkFreeMemory(m_vkd->device(), memory, nullptr);
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_memory.h b/src/dxvk/dxvk_memory.h
new file mode 100644
index 000000000..c44f4ea3f
--- /dev/null
+++ b/src/dxvk/dxvk_memory.h
@@ -0,0 +1,105 @@
+#pragma once
+
+#include "dxvk_adapter.h"
+
+namespace dxvk {
+  
+  class DxvkMemoryAllocator;
+  
+  
+  /**
+   * \brief Memory slice
+   */
+  class DxvkMemory {
+    
+  public:
+    
+    DxvkMemory();
+    DxvkMemory(
+      DxvkMemoryAllocator*  alloc,
+      VkDeviceMemory        memory,
+      void*                 mapPtr);
+    DxvkMemory             (DxvkMemory&& other);
+    DxvkMemory& operator = (DxvkMemory&& other);
+    ~DxvkMemory();
+    
+    /**
+     * \brief Memory object
+     * 
+     * This information is required when
+     * binding memory to Vulkan objects.
+     * \returns Memory object
+     */
+    VkDeviceMemory memory() const {
+      return m_memory;
+    }
+    
+    /**
+     * \brief Offset from memory object
+     * 
+     * This information is required when
+     * binding memory to Vulkan objects.
+     * \returns Offset from memory object
+     */
+    VkDeviceSize offset() const {
+      return 0;
+    }
+    
+    /**
+     * \brief Pointer to mapped data
+     * \returns Pointer to mapped data
+     */
+    void* mapPtr() const {
+      return m_mapPtr;
+    }
+    
+  private:
+    
+    DxvkMemoryAllocator*  m_alloc  = nullptr;
+    VkDeviceMemory        m_memory = VK_NULL_HANDLE;
+    void*                 m_mapPtr = nullptr;
+    
+  };
+  
+  
+  /**
+   * \brief Memory allocator
+   * 
+   * Allocates device memory for Vulkan resources.
+   * Memory objects will be destroyed automatically.
+   */
+  class DxvkMemoryAllocator {
+    friend class DxvkMemory;
+  public:
+    
+    DxvkMemoryAllocator(
+      const Rc<DxvkAdapter>&  adapter,
+      const Rc<vk::DeviceFn>& vkd);
+    ~DxvkMemoryAllocator();
+    
+    /**
+     * \brief Allocates device memory
+     * 
+     * \param [in] req Memory requirements
+     * \param [in] flags Memory type flags
+     * \returns Allocated memory slice
+     */
+    DxvkMemory alloc(
+      const VkMemoryRequirements& req,
+      const VkMemoryPropertyFlags flags);
+    
+  private:
+    
+    const Rc<vk::DeviceFn>                 m_vkd;
+    const VkPhysicalDeviceMemoryProperties m_memProps;
+    
+    VkDeviceMemory allocMemory(
+      VkDeviceSize    blockSize,
+      uint32_t        memoryType);
+    
+    void freeMemory(
+      VkDeviceMemory  memory);
+    
+  };
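+  
+  // Illustrative usage sketch (not part of the original patch): allocating
+  // host-visible memory for a buffer. Assumes 'vkd' is the device function
+  // table, 'allocator' a DxvkMemoryAllocator, 'buffer' an existing VkBuffer
+  // handle, and 'data'/'size' the application data to upload.
+  //
+  //   VkMemoryRequirements req;
+  //   vkd->vkGetBufferMemoryRequirements(vkd->device(), buffer, &req);
+  //   
+  //   DxvkMemory memory = allocator.alloc(req,
+  //     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
+  //   | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+  //   
+  //   vkd->vkBindBufferMemory(vkd->device(), buffer,
+  //     memory.memory(), memory.offset());
+  //   std::memcpy(memory.mapPtr(), data, size);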
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_renderpass.cpp b/src/dxvk/dxvk_renderpass.cpp
new file mode 100644
index 000000000..c7aad5c52
--- /dev/null
+++ b/src/dxvk/dxvk_renderpass.cpp
@@ -0,0 +1,166 @@
+#include "dxvk_renderpass.h"
+
+namespace dxvk {
+  
+  DxvkRenderPassFormat::DxvkRenderPassFormat() {
+    for (uint32_t i = 0; i < MaxNumColorTargets; i++)
+      m_color.at(i) = VK_FORMAT_UNDEFINED;
+    m_depth   = VK_FORMAT_UNDEFINED;
+    m_samples = VK_SAMPLE_COUNT_1_BIT;
+  }
+  
+  
+  size_t DxvkRenderPassFormat::hash() const {
+    DxvkHashState result;
+    std::hash<VkFormat>              fhash;
+    std::hash<VkSampleCountFlagBits> shash;
+    
+    for (uint32_t i = 0; i < MaxNumColorTargets; i++)
+      result.add(fhash(m_color.at(i)));
+    
+    result.add(fhash(m_depth));
+    result.add(shash(m_samples));
+    return result;
+  }
+  
+  
+  bool DxvkRenderPassFormat::operator == (const DxvkRenderPassFormat& other) const {
+    bool equal = m_depth   == other.m_depth
+              && m_samples == other.m_samples;
+    for (uint32_t i = 0; i < MaxNumColorTargets && equal; i++)
+      equal = m_color.at(i) == other.m_color.at(i);
+    return equal;
+  }
+  
+  
+  bool DxvkRenderPassFormat::operator != (const DxvkRenderPassFormat& other) const {
+    return !this->operator == (other);
+  }
+  
+  
+  DxvkRenderPass::DxvkRenderPass(
+    const Rc<vk::DeviceFn>&     vkd,
+    const DxvkRenderPassFormat& fmt,
+          VkImageLayout         initialLayout,
+          VkImageLayout         finalLayout)
+  : m_vkd(vkd), m_format(fmt) {
+    std::vector<VkAttachmentDescription> attachments;
+    
+    VkAttachmentReference                                 depthRef;
+    std::array<VkAttachmentReference, MaxNumColorTargets> colorRef;
+    
+    // Render passes may not require the previous
+    // contents of the attachments to be preserved.
+    VkAttachmentLoadOp  loadOp  = VK_ATTACHMENT_LOAD_OP_LOAD;
+    VkAttachmentStoreOp storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    
+    if (initialLayout == VK_IMAGE_LAYOUT_UNDEFINED)
+      loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    
+    if (fmt.getDepthFormat() != VK_FORMAT_UNDEFINED) {
+      VkAttachmentDescription desc;
+      desc.flags          = 0;
+      desc.format         = fmt.getDepthFormat();
+      desc.samples        = fmt.getSampleCount();
+      desc.loadOp         = loadOp;
+      desc.storeOp        = storeOp;
+      desc.stencilLoadOp  = loadOp;
+      desc.stencilStoreOp = storeOp;
+      desc.initialLayout  = initialLayout;
+      desc.finalLayout    = finalLayout;
+      
+      depthRef.attachment = attachments.size();
+      depthRef.layout     = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+      
+      attachments.push_back(desc);
+    }
+    
+    for (uint32_t i = 0; i < MaxNumColorTargets; i++) {
+      colorRef.at(i).attachment = VK_ATTACHMENT_UNUSED;
+      colorRef.at(i).layout     = VK_IMAGE_LAYOUT_UNDEFINED;
+      
+      if (fmt.getColorFormat(i) != VK_FORMAT_UNDEFINED) {
+        VkAttachmentDescription desc;
+        desc.flags          = 0;
+        desc.format         = fmt.getColorFormat(i);
+        desc.samples        = fmt.getSampleCount();
+        desc.loadOp         = loadOp;
+        desc.storeOp        = storeOp;
+        desc.stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+        desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+        desc.initialLayout  = initialLayout;
+        desc.finalLayout    = finalLayout;
+        
+        colorRef.at(i).attachment = attachments.size();
+        colorRef.at(i).layout     = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+        
+        attachments.push_back(desc);
+      }
+    }
+    
+    VkSubpassDescription subpass;
+    subpass.flags                     = 0;
+    subpass.pipelineBindPoint         = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.inputAttachmentCount      = 0;
+    subpass.pInputAttachments         = nullptr;
+    subpass.colorAttachmentCount      = colorRef.size();
+    subpass.pColorAttachments         = colorRef.data();
+    subpass.pResolveAttachments       = nullptr;
+    subpass.pDepthStencilAttachment   = fmt.getDepthFormat() != VK_FORMAT_UNDEFINED ? &depthRef : nullptr;
+    subpass.preserveAttachmentCount   = 0;
+    subpass.pPreserveAttachments      = nullptr;
+    
+    VkRenderPassCreateInfo info;
+    info.sType                        = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    info.pNext                        = nullptr;
+    info.flags                        = 0;
+    info.attachmentCount              = attachments.size();
+    info.pAttachments                 = attachments.data();
+    info.subpassCount                 = 1;
+    info.pSubpasses                   = &subpass;
+    info.dependencyCount              = 0;
+    info.pDependencies                = nullptr;
+    
+    if (m_vkd->vkCreateRenderPass(m_vkd->device(), &info, nullptr, &m_renderPass) != VK_SUCCESS)
+      throw DxvkError("DxvkRenderPass::DxvkRenderPass: Failed to create render pass object");
+  }
+  
+  
+  DxvkRenderPass::~DxvkRenderPass() {
+    m_vkd->vkDestroyRenderPass(
+      m_vkd->device(), m_renderPass, nullptr);
+  }
+  
+  
+  DxvkRenderPassPool::DxvkRenderPassPool(const Rc<vk::DeviceFn>& vkd)
+  : m_vkd(vkd) { }
+  
+  
+  DxvkRenderPassPool::~DxvkRenderPassPool() {
+    
+  }
+  
+  
+  Rc<DxvkRenderPass> DxvkRenderPassPool::getRenderPass(
+    const DxvkRenderPassFormat& fmt) {
+    std::lock_guard<std::mutex> lock(m_mutex);
+    
+    auto rp = m_renderPasses.find(fmt);
+    
+    if (rp != m_renderPasses.end())
+      return rp->second;
+    
+    auto result = this->createRenderPass(fmt);
+    m_renderPasses.insert(std::make_pair(fmt, result));
+    return result;
+  }
+  
+  
+  Rc<DxvkRenderPass> DxvkRenderPassPool::createRenderPass(
+    const DxvkRenderPassFormat& fmt) {
+    return new DxvkRenderPass(m_vkd, fmt,
+      VK_IMAGE_LAYOUT_GENERAL,
+      VK_IMAGE_LAYOUT_GENERAL);
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_renderpass.h b/src/dxvk/dxvk_renderpass.h
new file mode 100644
index 000000000..2e57ab2e3
--- /dev/null
+++ b/src/dxvk/dxvk_renderpass.h
@@ -0,0 +1,175 @@
+#pragma once
+
+#include <mutex>
+#include <unordered_map>
+
+#include "dxvk_hash.h"
+#include "dxvk_include.h"
+
+namespace dxvk {
+  
+  constexpr uint32_t MaxNumColorTargets = 8;
+  
+  /**
+   * \brief Render pass format
+   * 
+   * Stores the formats of all render targets
+   * that are used by a framebuffer object.
+   */
+  class DxvkRenderPassFormat {
+    
+  public:
+    
+    DxvkRenderPassFormat();
+    
+    /**
+     * \brief Retrieves color target format
+     * 
+     * If the color target has not been defined,
+     * this will return \c VK_FORMAT_UNDEFINED.
+     * \param [in] id Color target index
+     * \returns Color target format
+     */
+    VkFormat getColorFormat(uint32_t id) const {
+      return m_color.at(id);
+    }
+    
+    /**
+     * \brief Retrieves depth-stencil format
+     * 
+     * If the color target has not been defined,
+     * this will return \c VK_FORMAT_UNDEFINED.
+     */
+    VkFormat getDepthFormat() const {
+      return m_depth;
+    }
+    
+    /**
+     * \brief Retrieves sample count
+     * 
+     * If no sample count has been explicitly specified,
+     * this will return \c VK_SAMPLE_COUNT_1_BIT.
+     * \returns Sample count
+     */
+    VkSampleCountFlagBits getSampleCount() const {
+      return m_samples;
+    }
+    
+    /**
+     * \brief Sets color target format
+     * 
+     * \param [in] id Color target index
+     * \param [in] fmt Color target format
+     */
+    void setColorFormat(uint32_t id, VkFormat fmt) {
+      m_color.at(id) = fmt;
+    }
+    
+    /**
+     * \brief Sets depth-stencil format
+     * \param [in] fmt Depth-stencil format
+     */
+    void setDepthFormat(VkFormat fmt) {
+      m_depth = fmt;
+    }
+    
+    /**
+     * \brief Sets sample count
+     * \param [in] samples Sample count
+     */
+    void setSampleCount(VkSampleCountFlagBits samples) {
+      m_samples = samples;
+    }
+    
+    /**
+     * \brief Computes the hash
+     * \returns Resulting hash
+     */
+    size_t hash() const;
+    
+    bool operator == (const DxvkRenderPassFormat& other) const;
+    bool operator != (const DxvkRenderPassFormat& other) const;
+    
+  private:
+    
+    std::array<VkFormat, MaxNumColorTargets> m_color;
+    VkFormat                                 m_depth;
+    VkSampleCountFlagBits                    m_samples;
+    
+  };
+  
+  
+  /**
+   * \brief DXVK render pass
+   * 
+   * Render pass objects are used internally to identify
+   * render target formats and to create framebuffer objects
+   * that are compatible with those formats.
+   */
+  class DxvkRenderPass : public RcObject {
+    
+  public:
+    
+    DxvkRenderPass(
+      const Rc<vk::DeviceFn>&     vkd,
+      const DxvkRenderPassFormat& fmt,
+            VkImageLayout         initialLayout,
+            VkImageLayout         finalLayout);
+    ~DxvkRenderPass();
+    
+    /**
+     * \brief Render pass handle
+     * 
+     * Internal use only.
+     * \returns Render pass handle
+     */
+    VkRenderPass handle() const {
+      return m_renderPass;
+    }
+    
+  private:
+    
+    Rc<vk::DeviceFn>      m_vkd;
+    DxvkRenderPassFormat  m_format;
+    VkRenderPass          m_renderPass;
+    
+  };
+  
+  
+  /**
+   * \brief Render pass pool
+   * 
+   * Thread-safe class that manages the render pass
+   * objects that are used within an application.
+   */
+  class DxvkRenderPassPool {
+    
+  public:
+    
+    DxvkRenderPassPool(const Rc<vk::DeviceFn>& vkd);
+    ~DxvkRenderPassPool();
+    
+    /**
+     * \brief Retrieves a render pass object
+     * 
+     * \param [in] fmt Render target formats
+     * \returns Compatible render pass object
+     */
+    Rc<DxvkRenderPass> getRenderPass(
+      const DxvkRenderPassFormat& fmt);
+    
+  private:
+    
+    Rc<vk::DeviceFn> m_vkd;
+    
+    std::mutex m_mutex;
+    std::unordered_map<
+      DxvkRenderPassFormat,
+      Rc<DxvkRenderPass>,
+      DxvkHash> m_renderPasses;
+    
+    Rc<DxvkRenderPass> createRenderPass(
+      const DxvkRenderPassFormat& fmt);
+    
+  };
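+  
+  // Illustrative usage sketch (not part of the original patch): requesting
+  // a render pass for a single BGRA8 color target from a pool 'pool'. Equal
+  // formats map to the same render pass object.
+  //
+  //   DxvkRenderPassFormat format;
+  //   format.setColorFormat(0, VK_FORMAT_B8G8R8A8_UNORM);
+  //   format.setSampleCount(VK_SAMPLE_COUNT_1_BIT);
+  //   
+  //   Rc<DxvkRenderPass> renderPass = pool.getRenderPass(format);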
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_resource.cpp b/src/dxvk/dxvk_resource.cpp
new file mode 100644
index 000000000..af7a2ea4e
--- /dev/null
+++ b/src/dxvk/dxvk_resource.cpp
@@ -0,0 +1,9 @@
+#include "dxvk_resource.h"
+
+namespace dxvk {
+  
+  DxvkResource::~DxvkResource() {
+    
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_resource.h b/src/dxvk/dxvk_resource.h
new file mode 100644
index 000000000..a6ca987ed
--- /dev/null
+++ b/src/dxvk/dxvk_resource.h
@@ -0,0 +1,33 @@
+#pragma once
+
+#include "dxvk_include.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK resource
+   * 
+   * Keeps track of whether the resource is currently in use
+   * by the GPU. As soon as a command that uses the resource
+   * is recorded, it will be marked as 'in use'.
+   */
+  class DxvkResource : public RcObject {
+    
+  public:
+    
+    virtual ~DxvkResource();
+    
+    bool isInUse() const {
+      return m_useCount != 0;
+    }
+    
+    void incUseCount() { m_useCount += 1; }
+    void decUseCount() { m_useCount -= 1; }
+    
+  private:
+    
+    std::atomic<uint32_t> m_useCount = { 0u };
+    
+  };
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_surface.cpp b/src/dxvk/dxvk_surface.cpp
new file mode 100644
index 000000000..302ca6da2
--- /dev/null
+++ b/src/dxvk/dxvk_surface.cpp
@@ -0,0 +1,150 @@
+#include "dxvk_surface.h"
+
+#include "../util/util_math.h"
+
+namespace dxvk {
+  
+  DxvkSurface::DxvkSurface(
+    const Rc<DxvkAdapter>&    adapter,
+          HINSTANCE           instance,
+          HWND                window)
+  : m_adapter         (adapter),
+    m_vki             (adapter->vki()),
+    m_handle          (createSurface(instance, window)),
+    m_surfaceFormats  (getSurfaceFormats()),
+    m_presentModes    (getPresentModes()) {
+    
+  }
+  
+  
+  DxvkSurface::~DxvkSurface() {
+    m_vki->vkDestroySurfaceKHR(
+      m_vki->instance(), m_handle, nullptr);
+  }
+  
+  
+  VkSurfaceCapabilitiesKHR DxvkSurface::getSurfaceCapabilities() const {
+    VkSurfaceCapabilitiesKHR surfaceCaps;
+    if (m_vki->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+          m_adapter->handle(), m_handle, &surfaceCaps) != VK_SUCCESS)
+      throw DxvkError("DxvkSurface::getSurfaceCapabilities: Failed to query surface capabilities");
+    return surfaceCaps;
+  }
+  
+  
+  VkSurfaceFormatKHR DxvkSurface::pickSurfaceFormat(VkSurfaceFormatKHR preferred) const {
+    // If the implementation allows us to freely choose
+    // the format, we'll just use the preferred format.
+    if (m_surfaceFormats.size() == 1 && m_surfaceFormats.at(0).format == VK_FORMAT_UNDEFINED)
+      return preferred;
+    
+    // If the preferred format is explicitly listed in
+    // the array of supported surface formats, use it
+    for (auto fmt : m_surfaceFormats) {
+      if (fmt.format     == preferred.format
+       && fmt.colorSpace == preferred.colorSpace)
+        return fmt;
+    }
+    
+    // Otherwise, fall back to the first format
+    return m_surfaceFormats.at(0);
+  }
+  
+  
+  VkPresentModeKHR DxvkSurface::pickPresentMode(VkPresentModeKHR preferred) const {
+    for (auto mode : m_presentModes) {
+      if (mode == preferred)
+        return mode;
+    }
+    
+    // This mode is guaranteed to be available
+    return VK_PRESENT_MODE_FIFO_KHR;
+  }
+  
+  
+  uint32_t DxvkSurface::pickImageCount(
+    const VkSurfaceCapabilitiesKHR& caps,
+          VkPresentModeKHR          mode) const {
+    uint32_t count = caps.minImageCount;
+    
+    if (mode == VK_PRESENT_MODE_MAILBOX_KHR
+     || mode == VK_PRESENT_MODE_FIFO_KHR)
+      count += 1;
+    
+    if (count > caps.maxImageCount && caps.maxImageCount != 0)
+      count = caps.maxImageCount;
+    
+    return count;
+  }
+  
+  
+  VkExtent2D DxvkSurface::pickImageExtent(
+    const VkSurfaceCapabilitiesKHR& caps,
+          VkExtent2D                preferred) const {
+    if (caps.currentExtent.width != std::numeric_limits<uint32_t>::max())
+      return caps.currentExtent;
+    
+    VkExtent2D actual;
+    actual.width  = clamp(preferred.width,  caps.minImageExtent.width,  caps.maxImageExtent.width);
+    actual.height = clamp(preferred.height, caps.minImageExtent.height, caps.maxImageExtent.height);
+    return actual;
+  }
+  
+  
+  VkSurfaceKHR DxvkSurface::createSurface(HINSTANCE instance, HWND window) {
+    VkWin32SurfaceCreateInfoKHR info;
+    info.sType      = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
+    info.pNext      = nullptr;
+    info.flags      = 0;
+    info.hinstance  = instance;
+    info.hwnd       = window;
+    
+    VkSurfaceKHR surface = VK_NULL_HANDLE;
+    if (m_vki->vkCreateWin32SurfaceKHR(m_vki->instance(), &info, nullptr, &surface) != VK_SUCCESS)
+      throw DxvkError("DxvkSurface::createSurface: Failed to create win32 surface");
+    
+    VkBool32 supportStatus = VK_FALSE;
+    
+    if (m_vki->vkGetPhysicalDeviceSurfaceSupportKHR(m_adapter->handle(),
+          m_adapter->presentQueueFamily(), surface, &supportStatus) != VK_SUCCESS) {
+      m_vki->vkDestroySurfaceKHR(m_vki->instance(), surface, nullptr);
+      throw DxvkError("DxvkSurface::createSurface: Failed to query surface support");
+    }
+    
+    if (!supportStatus) {
+      m_vki->vkDestroySurfaceKHR(m_vki->instance(), surface, nullptr);
+      throw DxvkError("DxvkSurface::createSurface: Surface not supported by device");
+    }
+    
+    return surface;
+  }
+  
+  
+  std::vector<VkSurfaceFormatKHR> DxvkSurface::getSurfaceFormats() {
+    uint32_t numFormats = 0;
+    if (m_vki->vkGetPhysicalDeviceSurfaceFormatsKHR(
+          m_adapter->handle(), m_handle, &numFormats, nullptr) != VK_SUCCESS)
+      throw DxvkError("DxvkSurface::getSurfaceFormats: Failed to query surface formats");
+    
+    std::vector<VkSurfaceFormatKHR> formats(numFormats);
+    if (m_vki->vkGetPhysicalDeviceSurfaceFormatsKHR(
+          m_adapter->handle(), m_handle, &numFormats, formats.data()) != VK_SUCCESS)
+      throw DxvkError("DxvkSurface::getSurfaceFormats: Failed to query surface formats");
+    return formats;
+  }
+  
+  
+  std::vector<VkPresentModeKHR> DxvkSurface::getPresentModes() {
+    uint32_t numModes = 0;
+    if (m_vki->vkGetPhysicalDeviceSurfacePresentModesKHR(
+          m_adapter->handle(), m_handle, &numModes, nullptr) != VK_SUCCESS)
+      throw DxvkError("DxvkSurface::getPresentModes: Failed to query present modes");
+    
+    std::vector<VkPresentModeKHR> modes(numModes);
+    if (m_vki->vkGetPhysicalDeviceSurfacePresentModesKHR(
+          m_adapter->handle(), m_handle, &numModes, modes.data()) != VK_SUCCESS)
+      throw DxvkError("DxvkSurface::getPresentModes: Failed to query present modes");
+    return modes;
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_surface.h b/src/dxvk/dxvk_surface.h
new file mode 100644
index 000000000..ee25508ec
--- /dev/null
+++ b/src/dxvk/dxvk_surface.h
@@ -0,0 +1,98 @@
+#pragma once
+
+#include "dxvk_adapter.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK surface
+   * 
+   * Vulkan representation of a drawable window surface. This
+   * class provides methods to query the current dimension of
+   * the surface as well as format support queries.
+   */
+  class DxvkSurface : public RcObject {
+    
+  public:
+    
+    DxvkSurface(
+      const Rc<DxvkAdapter>&    adapter,
+            HINSTANCE           instance,
+            HWND                window);
+    ~DxvkSurface();
+    
+    /**
+     * \brief Vulkan surface handle
+     * \returns The surface handle
+     */
+    VkSurfaceKHR handle() const {
+      return m_handle;
+    }
+    
+    /**
+     * \brief Queries surface capabilities
+     * 
+     * Retrieves up-to-date information about the surface,
+     * such as the bounds of the swapchain images.
+     * \returns Current surface properties
+     */
+    VkSurfaceCapabilitiesKHR getSurfaceCapabilities() const;
+    
+    /**
+     * \brief Picks a suitable surface format
+     * 
+     * \param [in] preferred Preferred surface format
+     * \returns The actual surface format
+     */
+    VkSurfaceFormatKHR pickSurfaceFormat(
+      VkSurfaceFormatKHR preferred) const;
+    
+    /**
+     * \brief Picks a supported present mode
+     * 
+     * \param [in] preferred The preferred present mode
+     * \returns The actual present mode
+     */
+    VkPresentModeKHR pickPresentMode(
+      VkPresentModeKHR preferred) const;
+    
+    /**
+     * \brief Picks a suitable image count for a swap chain
+     * 
+     * \param [in] caps Surface capabilities
+     * \param [in] mode The present mode
+     * \returns Suitable image count
+     */
+    uint32_t pickImageCount(
+      const VkSurfaceCapabilitiesKHR& caps,
+            VkPresentModeKHR          mode) const;
+    
+    /**
+     * \brief Picks a suitable image size for a swap chain
+     * 
+     * \param [in] caps Surface capabilities
+     * \param [in] preferred Preferred image size
+     * \returns Selected image size
+     */
+    VkExtent2D pickImageExtent(
+      const VkSurfaceCapabilitiesKHR& caps,
+            VkExtent2D                preferred) const;
+    
+  private:
+    
+    Rc<DxvkAdapter>    m_adapter;
+    Rc<vk::InstanceFn> m_vki;
+    
+    VkSurfaceKHR m_handle;
+    
+    std::vector<VkSurfaceFormatKHR> m_surfaceFormats;
+    std::vector<VkPresentModeKHR>   m_presentModes;
+    
+    VkSurfaceKHR createSurface(HINSTANCE instance, HWND window);
+    
+    std::vector<VkSurfaceFormatKHR> getSurfaceFormats();
+    std::vector<VkPresentModeKHR> getPresentModes();
+    
+  };
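+  
+  // Illustrative usage sketch (not part of the original patch): deriving
+  // swapchain parameters from a surface, mirroring what DxvkSwapchain does.
+  //
+  //   auto caps = surface->getSurfaceCapabilities();
+  //   auto fmt  = surface->pickSurfaceFormat(
+  //     { VK_FORMAT_B8G8R8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR });
+  //   auto mode = surface->pickPresentMode(VK_PRESENT_MODE_FIFO_KHR);
+  //   
+  //   uint32_t   imageCount = surface->pickImageCount(caps, mode);
+  //   VkExtent2D extent     = surface->pickImageExtent(caps, { 1280, 720 });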
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_swapchain.cpp b/src/dxvk/dxvk_swapchain.cpp
new file mode 100644
index 000000000..f55e39b88
--- /dev/null
+++ b/src/dxvk/dxvk_swapchain.cpp
@@ -0,0 +1,185 @@
+#include "dxvk_main.h"
+
+#include "dxvk_device.h"
+#include "dxvk_framebuffer.h"
+#include "dxvk_surface.h"
+#include "dxvk_swapchain.h"
+
+namespace dxvk {
+  
+  DxvkSwapchain::DxvkSwapchain(
+    const Rc<DxvkDevice>&           device,
+    const Rc<DxvkSurface>&          surface,
+    const DxvkSwapchainProperties&  properties,
+          VkQueue                   queue)
+  : m_device    (device),
+    m_vkd       (device->vkd()),
+    m_surface   (surface),
+    m_queue     (queue),
+    m_properties(properties) {
+    this->recreateSwapchain();
+  }
+  
+  
+  DxvkSwapchain::~DxvkSwapchain() {
+    m_device->waitForIdle();
+    m_vkd->vkDestroySwapchainKHR(
+      m_vkd->device(), m_handle, nullptr);
+  }
+  
+  
+  Rc<DxvkFramebuffer> DxvkSwapchain::getFramebuffer(
+    const Rc<DxvkSemaphore>& wakeSync) {
+    VkResult status = this->acquireNextImage(wakeSync);
+    
+    if (status == VK_ERROR_OUT_OF_DATE_KHR) {
+      this->recreateSwapchain();
+      status = this->acquireNextImage(wakeSync);
+    }
+    
+    if (status != VK_SUCCESS
+     && status != VK_SUBOPTIMAL_KHR)
+      throw DxvkError("DxvkSwapchain::getFramebuffer: Failed to acquire image");
+    
+    return m_framebuffers.at(m_imageIndex);
+  }
+  
+  
+  void DxvkSwapchain::present(const Rc<DxvkSemaphore>& waitSync) {
+    const VkSemaphore waitSemaphore = waitSync->handle();
+    
+    VkPresentInfoKHR info;
+    info.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+    info.pNext              = nullptr;
+    info.waitSemaphoreCount = 1;
+    info.pWaitSemaphores    = &waitSemaphore;
+    info.swapchainCount     = 1;
+    info.pSwapchains        = &m_handle;
+    info.pImageIndices      = &m_imageIndex;
+    info.pResults           = nullptr;
+    
+    VkResult status = m_vkd->vkQueuePresentKHR(m_queue, &info);
+    
+    if (status != VK_SUCCESS
+     && status != VK_SUBOPTIMAL_KHR
+     && status != VK_ERROR_OUT_OF_DATE_KHR)
+      throw DxvkError("DxvkSwapchain::present: Failed to present image");
+  }
+  
+  
+  void DxvkSwapchain::changeProperties(
+    const DxvkSwapchainProperties& props) {
+    m_properties = props;
+    this->recreateSwapchain();
+  }
+  
+  
+  VkResult DxvkSwapchain::acquireNextImage(
+    const Rc<DxvkSemaphore>& wakeSync) {
+    return m_vkd->vkAcquireNextImageKHR(
+      m_vkd->device(), m_handle,
+      std::numeric_limits<uint64_t>::max(),
+      wakeSync->handle(),
+      VK_NULL_HANDLE,
+      &m_imageIndex);
+  }
+  
+  
+  void DxvkSwapchain::recreateSwapchain() {
+    VkSwapchainKHR oldSwapchain = m_handle;
+    
+    // Wait until we can be certain that none of our
+    // resources are still in use by the device.
+    m_device->waitForIdle();
+    
+    // Recreate the actual swapchain object
+    auto caps = m_surface->getSurfaceCapabilities();
+    auto fmt  = m_surface->pickSurfaceFormat(m_properties.preferredSurfaceFormat);
+    auto mode = m_surface->pickPresentMode  (m_properties.preferredPresentMode);
+    
+    VkSwapchainCreateInfoKHR swapInfo;
+    swapInfo.sType                  = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+    swapInfo.pNext                  = nullptr;
+    swapInfo.flags                  = 0;
+    swapInfo.surface                = m_surface->handle();
+    swapInfo.minImageCount          = m_surface->pickImageCount(caps, mode);
+    swapInfo.imageFormat            = fmt.format;
+    swapInfo.imageColorSpace        = fmt.colorSpace;
+    swapInfo.imageExtent            = m_surface->pickImageExtent(caps, m_properties.preferredBufferSize);
+    swapInfo.imageArrayLayers       = 1;
+    swapInfo.imageUsage             = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    swapInfo.imageSharingMode       = VK_SHARING_MODE_EXCLUSIVE;
+    swapInfo.queueFamilyIndexCount  = 0;
+    swapInfo.pQueueFamilyIndices    = nullptr;
+    swapInfo.preTransform           = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+    swapInfo.compositeAlpha         = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
+    swapInfo.presentMode            = mode;
+    swapInfo.clipped                = VK_TRUE;
+    swapInfo.oldSwapchain           = oldSwapchain;
+    
+    if (m_vkd->vkCreateSwapchainKHR(m_vkd->device(), &swapInfo, nullptr, &m_handle) != VK_SUCCESS)
+      throw DxvkError("DxvkSwapchain::recreateSwapchain: Failed to recreate swap chain");
+    
+    // Destroy previous swapchain object
+    m_vkd->vkDestroySwapchainKHR(
+      m_vkd->device(), oldSwapchain, nullptr);
+    
+    // Create the render pass object
+    DxvkRenderPassFormat renderTargetFormat;
+    renderTargetFormat.setColorFormat(0, fmt.format);
+    
+    m_renderPass = new DxvkRenderPass(
+      m_vkd, renderTargetFormat,
+      VK_IMAGE_LAYOUT_UNDEFINED,
+      VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
+    
+    // Retrieve swap images
+    auto swapImages = this->retrieveSwapImages();
+    m_framebuffers.resize(swapImages.size());
+    
+    for (size_t i = 0; i < swapImages.size(); i++) {
+      DxvkImageCreateInfo imageInfo;
+      imageInfo.type          = VK_IMAGE_TYPE_2D;
+      imageInfo.format        = fmt.format;
+      imageInfo.sampleCount   = VK_SAMPLE_COUNT_1_BIT;
+      imageInfo.extent.width  = swapInfo.imageExtent.width;
+      imageInfo.extent.height = swapInfo.imageExtent.height;
+      imageInfo.extent.depth  = 1;
+      imageInfo.numLayers     = swapInfo.imageArrayLayers;
+      imageInfo.mipLevels     = 1;
+      imageInfo.usage         = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+      imageInfo.tiling        = VK_IMAGE_TILING_OPTIMAL;
+      
+      DxvkImageViewCreateInfo viewInfo;
+      viewInfo.type         = VK_IMAGE_VIEW_TYPE_2D;
+      viewInfo.format       = fmt.format;
+      viewInfo.aspect       = VK_IMAGE_ASPECT_COLOR_BIT;
+      viewInfo.minLevel     = 0;
+      viewInfo.numLevels    = 1;
+      viewInfo.minLayer     = 0;
+      viewInfo.numLayers    = swapInfo.imageArrayLayers;
+      
+      Rc<DxvkImage> image = new DxvkImage(m_vkd, imageInfo, swapImages.at(i));
+      Rc<DxvkImageView> iview = m_device->createImageView(image, viewInfo);
+      
+      DxvkRenderTargets renderTargets;
+      renderTargets.setColorTarget(0, iview);
+      
+      m_framebuffers.at(i) = new DxvkFramebuffer(
+        m_vkd, m_renderPass, renderTargets);
+    }
+  }
+  
+  
+  std::vector<VkImage> DxvkSwapchain::retrieveSwapImages() {
+    uint32_t imageCount = 0;
+    if (m_vkd->vkGetSwapchainImagesKHR(m_vkd->device(), m_handle, &imageCount, nullptr) != VK_SUCCESS)
+      throw DxvkError("DxvkSwapchain::recreateSwapchain: Failed to retrieve swap chain images");
+    
+    std::vector<VkImage> images(imageCount);
+    if (m_vkd->vkGetSwapchainImagesKHR(m_vkd->device(), m_handle, &imageCount, images.data()) != VK_SUCCESS)
+      throw DxvkError("DxvkSwapchain::recreateSwapchain: Failed to retrieve swap chain images");
+    return images;
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_swapchain.h b/src/dxvk/dxvk_swapchain.h
new file mode 100644
index 000000000..a495510ab
--- /dev/null
+++ b/src/dxvk/dxvk_swapchain.h
@@ -0,0 +1,96 @@
+#pragma once
+
+#include "dxvk_framebuffer.h"
+#include "dxvk_sync.h"
+
+namespace dxvk {
+ 
+  class DxvkDevice;
+  class DxvkFramebuffer;
+  class DxvkSurface;
+  
+  /**
+   * \brief Swapchain properties
+   * 
+   * Stores the preferred surface format, present
+   * mode and image size of a swapchain.
+   */
+  struct DxvkSwapchainProperties {
+    VkSurfaceFormatKHR preferredSurfaceFormat;
+    VkPresentModeKHR   preferredPresentMode;
+    VkExtent2D         preferredBufferSize;
+  };
+  
+  
+  /**
+   * \brief DXVK swapchain
+   * 
+   * Manages a Vulkan swapchain object.
+   */
+  class DxvkSwapchain : public RcObject {
+    
+  public:
+    
+    DxvkSwapchain(
+      const Rc<DxvkDevice>&           device,
+      const Rc<DxvkSurface>&          surface,
+      const DxvkSwapchainProperties&  properties,
+            VkQueue                   queue);
+    ~DxvkSwapchain();
+    
+    /**
+     * \brief Retrieves the framebuffer for the current frame
+     * 
+     * If necessary, this will automatically recreate the
+     * underlying swapchain object and framebuffer objects.
+     * \param [in] wakeSync Semaphore to signal
+     * \returns The framebuffer object
+     */
+    Rc<DxvkFramebuffer> getFramebuffer(
+      const Rc<DxvkSemaphore>& wakeSync);
+    
+    /**
+     * \brief Presents the current framebuffer
+     * 
+     * This may actually fail to present an image. If that is the
+     * case, the surface contents will be undefined for this frame
+     * and the swapchain object will be recreated.
+     * \param [in] waitSync Semaphore to wait on
+     */
+    void present(
+      const Rc<DxvkSemaphore>& waitSync);
+    
+    /**
+     * \brief Changes swapchain properties
+     * 
+     * This must not be called between \ref getFramebuffer
+     * and \ref present as this method may recreate the swap
+     * chain and framebuffer objects immediately.
+     * \param [in] props New swapchain properties
+     */
+    void changeProperties(
+      const DxvkSwapchainProperties& props);
+    
+  private:
+    
+    Rc<DxvkDevice>          m_device;
+    Rc<vk::DeviceFn>        m_vkd;
+    Rc<DxvkSurface>         m_surface;
+    
+    VkQueue                 m_queue;
+    
+    DxvkSwapchainProperties m_properties;
+    VkSwapchainKHR          m_handle     = VK_NULL_HANDLE;
+    uint32_t                m_imageIndex = 0;
+    uint32_t                m_frameIndex = 0;
+    
+    Rc<DxvkRenderPass>               m_renderPass;
+    std::vector<Rc<DxvkFramebuffer>> m_framebuffers;
+    
+    VkResult acquireNextImage(
+      const Rc<DxvkSemaphore>& wakeSync);
+    
+    void recreateSwapchain();
+    
+    std::vector<VkImage> retrieveSwapImages();
+    
+  };
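+  
+  // Illustrative per-frame usage sketch (not part of the original patch).
+  // Assumes 'wakeSync' and 'waitSync' are DxvkSemaphore objects that the
+  // application also passes to its command submission.
+  //
+  //   Rc<DxvkFramebuffer> fb = swapchain->getFramebuffer(wakeSync);
+  //   // ... record and submit commands rendering into 'fb',
+  //   //     waiting on 'wakeSync' and signaling 'waitSync' ...
+  //   swapchain->present(waitSync);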
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_sync.cpp b/src/dxvk/dxvk_sync.cpp
new file mode 100644
index 000000000..aabf32a76
--- /dev/null
+++ b/src/dxvk/dxvk_sync.cpp
@@ -0,0 +1,57 @@
+#include "dxvk_sync.h"
+
+namespace dxvk {
+  
+  DxvkSemaphore::DxvkSemaphore(
+    const Rc<vk::DeviceFn>& vkd)
+  : m_vkd(vkd) {
+    VkSemaphoreCreateInfo info;
+    info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    info.pNext = nullptr;
+    info.flags = 0;
+    
+    if (m_vkd->vkCreateSemaphore(m_vkd->device(), &info, nullptr, &m_semaphore) != VK_SUCCESS)
+      throw DxvkError("DxvkSemaphore::DxvkSemaphore: Failed to create semaphore");
+  }
+  
+  
+  DxvkSemaphore::~DxvkSemaphore() {
+    m_vkd->vkDestroySemaphore(
+      m_vkd->device(), m_semaphore, nullptr);
+  }
+  
+  
+  DxvkFence::DxvkFence(const Rc<vk::DeviceFn>& vkd)
+  : m_vkd(vkd) {
+    VkFenceCreateInfo info;
+    info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    info.pNext = nullptr;
+    info.flags = 0;
+    
+    if (m_vkd->vkCreateFence(m_vkd->device(), &info, nullptr, &m_fence) != VK_SUCCESS)
+      throw DxvkError("DxvkFence::DxvkFence: Failed to create fence");
+  }
+  
+  
+  DxvkFence::~DxvkFence() {
+    m_vkd->vkDestroyFence(
+      m_vkd->device(), m_fence, nullptr);
+  }
+  
+  
+  bool DxvkFence::wait(uint64_t timeout) const {
+    VkResult status = m_vkd->vkWaitForFences(
+      m_vkd->device(), 1, &m_fence, VK_FALSE, timeout);
+    
+    if (status == VK_SUCCESS) return true;
+    if (status == VK_TIMEOUT) return false;
+    throw DxvkError("DxvkFence::wait: Failed to wait for fence");
+  }
+  
+  
+  void DxvkFence::reset() {
+    if (m_vkd->vkResetFences(m_vkd->device(), 1, &m_fence) != VK_SUCCESS)
+      throw DxvkError("DxvkFence::reset: Failed to reset fence");
+  }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/dxvk_sync.h b/src/dxvk/dxvk_sync.h
new file mode 100644
index 000000000..9968f0db7
--- /dev/null
+++ b/src/dxvk/dxvk_sync.h
@@ -0,0 +1,89 @@
+#pragma once
+
+#include "dxvk_resource.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief Semaphore object
+   * 
+   * This is merely an abstraction of Vulkan's semaphores.
+   * They are only used internally by \ref DxvkSwapchain
+   * in order to synchronize the presentation engine with
+   * command buffer submissions.
+   */
+  class DxvkSemaphore : public DxvkResource {
+    
+  public:
+    
+    DxvkSemaphore(const Rc<vk::DeviceFn>& vkd);
+    ~DxvkSemaphore();
+    
+    /**
+     * \brief Semaphore handle
+     * 
+     * Internal use only.
+     * \returns Semaphore handle
+     */
+    VkSemaphore handle() const {
+      return m_semaphore;
+    }
+    
+  private:
+    
+    Rc<vk::DeviceFn>  m_vkd;
+    VkSemaphore       m_semaphore;
+    
+  };
+  
+  
+  /**
+   * \brief Fence object
+   * 
+   * This is merely an abstraction of Vulkan's fences. Client
+   * APIs that support fence operations may use them directly.
+   * Other than that, they are used internally to keep track
+   * of GPU resource usage.
+   */
+  class DxvkFence : public RcObject {
+    
+  public:
+    
+    DxvkFence(const Rc<vk::DeviceFn>& vkd);
+    ~DxvkFence();
+    
+    /**
+     * \brief Fence handle
+     * 
+     * Internal use only.
+     * \returns Fence handle
+     */
+    VkFence handle() const {
+      return m_fence;
+    }
+    
+    /**
+     * \brief Waits for fence to be signaled
+     * 
+     * \param [in] timeout Amount of time to wait
+     * \returns \c true if the fence has been signaled,
+     *          \c false if a timeout occurred.
+     */
+    bool wait(uint64_t timeout) const;
+    
+    /**
+     * \brief Resets the fence
+     * 
+     * Transitions the fence into the unsignaled state,
+     * which means that the fence may be submitted again.
+     */
+    void reset();
+    
+  private:
+    
+    Rc<vk::DeviceFn>  m_vkd;
+    VkFence           m_fence;
+    
+  };
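+  
+  // Illustrative usage sketch (not part of the original patch): using a
+  // fence to wait for a queue submission before reusing resources. Assumes
+  // 'vkd' is the device function table.
+  //
+  //   Rc<DxvkFence> fence = new DxvkFence(vkd);
+  //   // ... vkQueueSubmit(..., fence->handle()) ...
+  //   while (!fence->wait(1'000'000'000))
+  //     ;  // keep waiting, one second at a time
+  //   fence->reset();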
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/meson.build b/src/dxvk/meson.build
new file mode 100644
index 000000000..89d0551e8
--- /dev/null
+++ b/src/dxvk/meson.build
@@ -0,0 +1,32 @@
+dxvk_src = files([
+  'dxvk_adapter.cpp',
+  'dxvk_cmdlist.cpp',
+  'dxvk_context.cpp',
+  'dxvk_context_state.cpp',
+  'dxvk_device.cpp',
+  'dxvk_framebuffer.cpp',
+  'dxvk_image.cpp',
+  'dxvk_instance.cpp',
+  'dxvk_lifetime.cpp',
+  'dxvk_main.cpp',
+  'dxvk_memory.cpp',
+  'dxvk_renderpass.cpp',
+  'dxvk_resource.cpp',
+  'dxvk_surface.cpp',
+  'dxvk_swapchain.cpp',
+  'dxvk_sync.cpp',
+  
+  'vulkan/dxvk_vulkan_extensions.cpp',
+  'vulkan/dxvk_vulkan_loader.cpp',
+])
+
+thread_dep = dependency('threads')
+
+dxvk_lib = static_library('dxvk', dxvk_src,
+  link_with           : [ util_lib ],
+  dependencies        : [ thread_dep, lib_vulkan, lib_sdl2 ],
+  include_directories : [ dxvk_include_path ])
+
+dxvk_dep = declare_dependency(
+  link_with           : [ dxvk_lib ],
+  include_directories : [ dxvk_include_path, include_directories('.') ])
\ No newline at end of file
diff --git a/src/dxvk/vulkan/dxvk_vulkan_extensions.cpp b/src/dxvk/vulkan/dxvk_vulkan_extensions.cpp
new file mode 100644
index 000000000..0b596cc89
--- /dev/null
+++ b/src/dxvk/vulkan/dxvk_vulkan_extensions.cpp
@@ -0,0 +1,71 @@
+#include "dxvk_vulkan_extensions.h"
+
+namespace dxvk::vk {
+  
+  bool NameSet::supports(const char* name) const {
+    return m_names.find(name) != m_names.end();
+  }
+  
+  
+  NameSet NameSet::enumerateInstanceLayers(const LibraryFn& vkl) {
+    uint32_t layerCount = 0;
+    if (vkl.vkEnumerateInstanceLayerProperties(&layerCount, nullptr) != VK_SUCCESS)
+      throw DxvkError("NameSet::enumerateInstanceLayers: Failed to query instance layers");
+    
+    std::vector<VkLayerProperties> layers(layerCount);
+    if (vkl.vkEnumerateInstanceLayerProperties(&layerCount, layers.data()) != VK_SUCCESS)
+      throw DxvkError("NameSet::enumerateInstanceLayers: Failed to query instance layers");
+    
+    NameSet result;
+    for (const auto& layer : layers)
+      result.m_names.insert(layer.layerName);
+    return result;
+  }
+  
+  
+  NameSet NameSet::enumerateInstanceExtensions(
+    const LibraryFn&        vkl,
+    const NameList&         layers) {
+    NameSet result;
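+    // A null layer name queries the extensions exposed by the Vulkan
+    // implementation itself and by implicitly enabled layers.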
+    result.addInstanceLayerExtensions(vkl, nullptr);
+    
+    for (size_t i = 0; i < layers.count(); i++)
+      result.addInstanceLayerExtensions(vkl, layers.name(i));
+    return result;
+  }
+  
+  
+  NameSet NameSet::enumerateDeviceExtensions(
+    const InstanceFn&       vki,
+          VkPhysicalDevice  device) {
+    uint32_t extCount = 0;
+    if (vki.vkEnumerateDeviceExtensionProperties(device, nullptr, &extCount, nullptr) != VK_SUCCESS)
+      throw DxvkError("ExtensionSet::addDeviceExtensions: Failed to query device extensions");
+    
+    std::vector<VkExtensionProperties> extensions(extCount);
+    if (vki.vkEnumerateDeviceExtensionProperties(device, nullptr, &extCount, extensions.data()) != VK_SUCCESS)
+      throw DxvkError("ExtensionSet::addDeviceExtensions: Failed to query device extensions");
+    
+    NameSet result;
+    for (const auto& ext : extensions)
+      result.m_names.insert(ext.extensionName);
+    return result;
+  }
+  
+  
+  void NameSet::addInstanceLayerExtensions(
+    const LibraryFn&        vkl,
+    const char*             layer) {
+    uint32_t extCount = 0;
+    if (vkl.vkEnumerateInstanceExtensionProperties(layer, &extCount, nullptr) != VK_SUCCESS)
+      throw DxvkError("ExtensionSet::addInstanceExtensions: Failed to query instance extensions");
+    
+    std::vector<VkExtensionProperties> extensions(extCount);
+    if (vkl.vkEnumerateInstanceExtensionProperties(layer, &extCount, extensions.data()) != VK_SUCCESS)
+      throw DxvkError("ExtensionSet::addInstanceExtensions: Failed to query instance extensions");
+    
+    for (const auto& ext : extensions)
+      m_names.insert(ext.extensionName);
+  }
+  
+}
diff --git a/src/dxvk/vulkan/dxvk_vulkan_extensions.h b/src/dxvk/vulkan/dxvk_vulkan_extensions.h
new file mode 100644
index 000000000..c8ae48ffb
--- /dev/null
+++ b/src/dxvk/vulkan/dxvk_vulkan_extensions.h
@@ -0,0 +1,103 @@
+#pragma once
+
+#include <string>
+#include <vector>
+#include <unordered_set>
+
+#include "dxvk_vulkan_loader.h"
+
+#include "../../util/util_error.h"
+#include "../../util/util_string.h"
+
+namespace dxvk::vk {
+  
+  /**
+   * \brief Name list
+   * 
+   * Stores a list of extension or layer names. The list
+   * holds raw pointers and does not copy the strings, so
+   * only string constants, or strings that outlive the
+   * list, may be added; anything else results in
+   * undefined behaviour.
+   */
+  class NameList {
+    
+  public:
+    
+    void add(const char* name) {
+      m_list.push_back(name);
+    }
+    
+    auto name(size_t i) const {
+      return m_list[i];
+    }
+    
+    auto names() const { return m_list.data(); }
+    auto count() const { return m_list.size(); }
+    
+  private:
+    
+    std::vector<const char*> m_list;
+    
+  };
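+  
+  // Typical use (sketch): collect extension name constants and hand the
+  // raw array to a Vulkan create-info structure:
+  //   NameList extensions;
+  //   extensions.add(VK_KHR_SURFACE_EXTENSION_NAME);
+  //   instanceInfo.enabledExtensionCount   = extensions.count();
+  //   instanceInfo.ppEnabledExtensionNames = extensions.names();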
+  
+  
+  /**
+   * \brief Name set
+   * 
+   * Stores a set of supported layers or extensions and
+   * provides methods to query their support status.
+   */
+  class NameSet {
+    
+  public:
+    
+    /**
+     * \brief Checks whether an extension or layer is supported
+     * 
+     * \param [in] name The layer or extension name
+     * \returns \c true if the entity is supported
+     */
+    bool supports(const char* name) const;
+    
+    /**
+     * \brief Enumerates instance layers
+     * 
+     * \param [in] vkl Vulkan library functions
+     * \returns Available instance layers
+     */
+    static NameSet enumerateInstanceLayers(
+      const LibraryFn&        vkl);
+    
+    /**
+     * \brief Enumerates instance extensions
+     * 
+     * \param [in] vkl Vulkan library functions
+     * \param [in] layers Enabled instance layers
+     * \returns Available instance extensions
+     */
+    static NameSet enumerateInstanceExtensions(
+      const LibraryFn&        vkl,
+      const NameList&         layers);
+    
+    /**
+     * \brief Enumerates device extensions
+     * 
+     * \param [in] vki Vulkan instance functions
+     * \param [in] device The physical device
+     * \returns Available device extensions
+     */
+    static NameSet enumerateDeviceExtensions(
+      const InstanceFn&       vki,
+            VkPhysicalDevice  device);
+    
+  private:
+    
+    std::unordered_set<std::string> m_names;
+    
+    void addInstanceLayerExtensions(
+      const LibraryFn&        vkl,
+      const char*             layer);
+    
+  };
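+  
+  // Usage sketch (enabledLayers being a NameList as defined above):
+  //   NameSet layers = NameSet::enumerateInstanceLayers(vkl);
+  //   if (layers.supports("VK_LAYER_LUNARG_standard_validation"))
+  //     enabledLayers.add("VK_LAYER_LUNARG_standard_validation");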
+  
+}
diff --git a/src/dxvk/vulkan/dxvk_vulkan_loader.cpp b/src/dxvk/vulkan/dxvk_vulkan_loader.cpp
new file mode 100644
index 000000000..79110a08d
--- /dev/null
+++ b/src/dxvk/vulkan/dxvk_vulkan_loader.cpp
@@ -0,0 +1,43 @@
+#include "dxvk_vulkan_loader.h"
+
+namespace dxvk::vk {
+  
+  PFN_vkVoidFunction LibraryLoader::sym(const char* name) const {
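+    // Passing a null instance restricts vkGetInstanceProcAddr to global
+    // commands such as vkCreateInstance and the enumeration functions.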
+    return ::vkGetInstanceProcAddr(nullptr, name);
+  }
+  
+  
+  InstanceLoader::InstanceLoader(VkInstance instance)
+  : m_instance(instance) { }
+  
+  
+  PFN_vkVoidFunction InstanceLoader::sym(const char* name) const {
+    return ::vkGetInstanceProcAddr(m_instance, name);
+  }
+  
+  
+  DeviceLoader::DeviceLoader(VkInstance instance, VkDevice device)
+  : m_getDeviceProcAddr(reinterpret_cast<PFN_vkGetDeviceProcAddr>(
+      ::vkGetInstanceProcAddr(instance, "vkGetDeviceProcAddr"))),
+    m_device(device) { }
+  
+  
+  PFN_vkVoidFunction DeviceLoader::sym(const char* name) const {
+    return m_getDeviceProcAddr(m_device, name);
+  }
+  
+  
+  LibraryFn::LibraryFn() { }
+  LibraryFn::~LibraryFn() { }
+  
+  
+  InstanceFn::InstanceFn(VkInstance instance)
+  : InstanceLoader(instance) { }
+  InstanceFn::~InstanceFn() { }
+  
+  
+  DeviceFn::DeviceFn(VkInstance instance, VkDevice device)
+  : DeviceLoader(instance, device) { }
+  DeviceFn::~DeviceFn() { }
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/vulkan/dxvk_vulkan_loader.h b/src/dxvk/vulkan/dxvk_vulkan_loader.h
new file mode 100644
index 000000000..60adbb741
--- /dev/null
+++ b/src/dxvk/vulkan/dxvk_vulkan_loader.h
@@ -0,0 +1,267 @@
+#pragma once
+
+#include "../../util/rc/util_rc.h"
+#include "../../util/rc/util_rc_ptr.h"
+
+#include "dxvk_vulkan_loader_fn.h"
+
+namespace dxvk::vk {
+  
+  /**
+   * \brief Vulkan library loader
+   * 
+   * Provides methods to load Vulkan functions that
+   * can be called before creating an instance.
+   */
+  struct LibraryLoader : public RcObject {
+    PFN_vkVoidFunction sym(const char* name) const;
+  };
+  
+  
+  /**
+   * \brief Vulkan instance loader
+   * 
+   * Loads Vulkan functions that can be
+   * called for a specific instance.
+   */
+  struct InstanceLoader : public RcObject {
+    InstanceLoader(VkInstance instance);
+    PFN_vkVoidFunction sym(const char* name) const;
+    VkInstance instance() const { return m_instance; }
+  private:
+    const VkInstance m_instance;
+  };
+  
+  
+  /**
+   * \brief Vulkan device loader
+   * 
+   * Loads Vulkan functions for a
+   * specific device.
+   */
+  struct DeviceLoader : public RcObject {
+    DeviceLoader(VkInstance instance, VkDevice device);
+    PFN_vkVoidFunction sym(const char* name) const;
+    VkDevice device() const { return m_device; }
+  private:
+    const PFN_vkGetDeviceProcAddr m_getDeviceProcAddr;
+    const VkDevice                m_device;
+  };
+  
+  
+  /**
+   * \brief Vulkan library functions
+   * 
+   * Vulkan functions that are called before
+   * creating an actual Vulkan instance.
+   */
+  struct LibraryFn : LibraryLoader {
+    LibraryFn();
+    ~LibraryFn();
+    
+    VULKAN_FN(vkCreateInstance);
+    VULKAN_FN(vkEnumerateInstanceLayerProperties);
+    VULKAN_FN(vkEnumerateInstanceExtensionProperties);
+  };
+  
+  
+  /**
+   * \brief Vulkan instance functions
+   * 
+   * Vulkan functions for a given instance that
+   * are independent of any Vulkan devices.
+   */
+  struct InstanceFn : InstanceLoader {
+    InstanceFn(VkInstance instance);
+    ~InstanceFn();
+    
+    VULKAN_FN(vkCreateDevice);
+    VULKAN_FN(vkDestroyInstance);
+    VULKAN_FN(vkEnumerateDeviceExtensionProperties);
+    VULKAN_FN(vkEnumeratePhysicalDevices);
+    VULKAN_FN(vkGetPhysicalDeviceFeatures);
+    VULKAN_FN(vkGetPhysicalDeviceFormatProperties);
+    VULKAN_FN(vkGetPhysicalDeviceImageFormatProperties);
+    VULKAN_FN(vkGetPhysicalDeviceMemoryProperties);
+    VULKAN_FN(vkGetPhysicalDeviceProperties);
+    VULKAN_FN(vkGetPhysicalDeviceQueueFamilyProperties);
+    VULKAN_FN(vkGetPhysicalDeviceSparseImageFormatProperties);
+    
+    #ifdef VK_KHR_surface
+    #ifdef VK_USE_PLATFORM_XCB_KHR
+    VULKAN_FN(vkCreateXcbSurfaceKHR);
+    VULKAN_FN(vkGetPhysicalDeviceXcbPresentationSupportKHR);
+    #endif
+    
+    #ifdef VK_USE_PLATFORM_XLIB_KHR
+    VULKAN_FN(vkCreateXlibSurfaceKHR);
+    VULKAN_FN(vkGetPhysicalDeviceXlibPresentationSupportKHR);
+    #endif
+    
+    #ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    VULKAN_FN(vkCreateWaylandSurfaceKHR);
+    VULKAN_FN(vkGetPhysicalDeviceWaylandPresentationSupportKHR);
+    #endif
+    
+    #ifdef VK_USE_PLATFORM_WIN32_KHR
+    VULKAN_FN(vkCreateWin32SurfaceKHR);
+    VULKAN_FN(vkGetPhysicalDeviceWin32PresentationSupportKHR);
+    #endif
+    
+    VULKAN_FN(vkDestroySurfaceKHR);
+    
+    VULKAN_FN(vkGetPhysicalDeviceSurfaceSupportKHR);
+    VULKAN_FN(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
+    VULKAN_FN(vkGetPhysicalDeviceSurfaceFormatsKHR);
+    VULKAN_FN(vkGetPhysicalDeviceSurfacePresentModesKHR);
+    #endif
+    
+    #ifdef VK_EXT_debug_report
+    VULKAN_FN(vkCreateDebugReportCallbackEXT);
+    VULKAN_FN(vkDestroyDebugReportCallbackEXT);
+    VULKAN_FN(vkDebugReportMessageEXT);
+    #endif
+  };
+  
+  
+  /**
+   * \brief Vulkan device functions
+   * 
+   * Vulkan functions for a specific Vulkan device, resolved
+   * through \c vkGetDeviceProcAddr so that calls bypass the
+   * loader's per-call dispatch overhead.
+   */
+  struct DeviceFn : DeviceLoader {
+    DeviceFn(VkInstance instance, VkDevice device);
+    ~DeviceFn();
+    
+    VULKAN_FN(vkDestroyDevice);
+    VULKAN_FN(vkGetDeviceQueue);
+    VULKAN_FN(vkQueueSubmit);
+    VULKAN_FN(vkQueueWaitIdle);
+    VULKAN_FN(vkDeviceWaitIdle);
+    VULKAN_FN(vkAllocateMemory);
+    VULKAN_FN(vkFreeMemory);
+    VULKAN_FN(vkMapMemory);
+    VULKAN_FN(vkUnmapMemory);
+    VULKAN_FN(vkFlushMappedMemoryRanges);
+    VULKAN_FN(vkInvalidateMappedMemoryRanges);
+    VULKAN_FN(vkGetDeviceMemoryCommitment);
+    VULKAN_FN(vkBindBufferMemory);
+    VULKAN_FN(vkBindImageMemory);
+    VULKAN_FN(vkGetBufferMemoryRequirements);
+    VULKAN_FN(vkGetImageMemoryRequirements);
+    VULKAN_FN(vkGetImageSparseMemoryRequirements);
+    VULKAN_FN(vkQueueBindSparse);
+    VULKAN_FN(vkCreateFence);
+    VULKAN_FN(vkDestroyFence);
+    VULKAN_FN(vkResetFences);
+    VULKAN_FN(vkGetFenceStatus);
+    VULKAN_FN(vkWaitForFences);
+    VULKAN_FN(vkCreateSemaphore);
+    VULKAN_FN(vkDestroySemaphore);
+    VULKAN_FN(vkCreateEvent);
+    VULKAN_FN(vkDestroyEvent);
+    VULKAN_FN(vkGetEventStatus);
+    VULKAN_FN(vkSetEvent);
+    VULKAN_FN(vkResetEvent);
+    VULKAN_FN(vkCreateQueryPool);
+    VULKAN_FN(vkDestroyQueryPool);
+    VULKAN_FN(vkGetQueryPoolResults);
+    VULKAN_FN(vkCreateBuffer);
+    VULKAN_FN(vkDestroyBuffer);
+    VULKAN_FN(vkCreateBufferView);
+    VULKAN_FN(vkDestroyBufferView);
+    VULKAN_FN(vkCreateImage);
+    VULKAN_FN(vkDestroyImage);
+    VULKAN_FN(vkGetImageSubresourceLayout);
+    VULKAN_FN(vkCreateImageView);
+    VULKAN_FN(vkDestroyImageView);
+    VULKAN_FN(vkCreateShaderModule);
+    VULKAN_FN(vkDestroyShaderModule);
+    VULKAN_FN(vkCreatePipelineCache);
+    VULKAN_FN(vkDestroyPipelineCache);
+    VULKAN_FN(vkGetPipelineCacheData);
+    VULKAN_FN(vkMergePipelineCaches);
+    VULKAN_FN(vkCreateGraphicsPipelines);
+    VULKAN_FN(vkCreateComputePipelines);
+    VULKAN_FN(vkDestroyPipeline);
+    VULKAN_FN(vkCreatePipelineLayout);
+    VULKAN_FN(vkDestroyPipelineLayout);
+    VULKAN_FN(vkCreateSampler);
+    VULKAN_FN(vkDestroySampler);
+    VULKAN_FN(vkCreateDescriptorSetLayout);
+    VULKAN_FN(vkDestroyDescriptorSetLayout);
+    VULKAN_FN(vkCreateDescriptorPool);
+    VULKAN_FN(vkDestroyDescriptorPool);
+    VULKAN_FN(vkResetDescriptorPool);
+    VULKAN_FN(vkAllocateDescriptorSets);
+    VULKAN_FN(vkFreeDescriptorSets);
+    VULKAN_FN(vkUpdateDescriptorSets);
+    VULKAN_FN(vkCreateFramebuffer);
+    VULKAN_FN(vkDestroyFramebuffer);
+    VULKAN_FN(vkCreateRenderPass);
+    VULKAN_FN(vkDestroyRenderPass);
+    VULKAN_FN(vkGetRenderAreaGranularity);
+    VULKAN_FN(vkCreateCommandPool);
+    VULKAN_FN(vkDestroyCommandPool);
+    VULKAN_FN(vkResetCommandPool);
+    VULKAN_FN(vkAllocateCommandBuffers);
+    VULKAN_FN(vkFreeCommandBuffers);
+    VULKAN_FN(vkBeginCommandBuffer);
+    VULKAN_FN(vkEndCommandBuffer);
+    VULKAN_FN(vkResetCommandBuffer);
+    VULKAN_FN(vkCmdBindPipeline);
+    VULKAN_FN(vkCmdSetViewport);
+    VULKAN_FN(vkCmdSetScissor);
+    VULKAN_FN(vkCmdSetLineWidth);
+    VULKAN_FN(vkCmdSetDepthBias);
+    VULKAN_FN(vkCmdSetBlendConstants);
+    VULKAN_FN(vkCmdSetDepthBounds);
+    VULKAN_FN(vkCmdSetStencilCompareMask);
+    VULKAN_FN(vkCmdSetStencilWriteMask);
+    VULKAN_FN(vkCmdSetStencilReference);
+    VULKAN_FN(vkCmdBindDescriptorSets);
+    VULKAN_FN(vkCmdBindIndexBuffer);
+    VULKAN_FN(vkCmdBindVertexBuffers);
+    VULKAN_FN(vkCmdDraw);
+    VULKAN_FN(vkCmdDrawIndexed);
+    VULKAN_FN(vkCmdDrawIndirect);
+    VULKAN_FN(vkCmdDrawIndexedIndirect);
+    VULKAN_FN(vkCmdDispatch);
+    VULKAN_FN(vkCmdDispatchIndirect);
+    VULKAN_FN(vkCmdCopyBuffer);
+    VULKAN_FN(vkCmdCopyImage);
+    VULKAN_FN(vkCmdBlitImage);
+    VULKAN_FN(vkCmdCopyBufferToImage);
+    VULKAN_FN(vkCmdCopyImageToBuffer);
+    VULKAN_FN(vkCmdUpdateBuffer);
+    VULKAN_FN(vkCmdFillBuffer);
+    VULKAN_FN(vkCmdClearColorImage);
+    VULKAN_FN(vkCmdClearDepthStencilImage);
+    VULKAN_FN(vkCmdClearAttachments);
+    VULKAN_FN(vkCmdResolveImage);
+    VULKAN_FN(vkCmdSetEvent);
+    VULKAN_FN(vkCmdResetEvent);
+    VULKAN_FN(vkCmdWaitEvents);
+    VULKAN_FN(vkCmdPipelineBarrier);
+    VULKAN_FN(vkCmdBeginQuery);
+    VULKAN_FN(vkCmdEndQuery);
+    VULKAN_FN(vkCmdResetQueryPool);
+    VULKAN_FN(vkCmdWriteTimestamp);
+    VULKAN_FN(vkCmdCopyQueryPoolResults);
+    VULKAN_FN(vkCmdPushConstants);
+    VULKAN_FN(vkCmdBeginRenderPass);
+    VULKAN_FN(vkCmdNextSubpass);
+    VULKAN_FN(vkCmdEndRenderPass);
+    VULKAN_FN(vkCmdExecuteCommands);
+    
+    #ifdef VK_KHR_swapchain
+    VULKAN_FN(vkCreateSwapchainKHR);
+    VULKAN_FN(vkDestroySwapchainKHR);
+    VULKAN_FN(vkGetSwapchainImagesKHR);
+    VULKAN_FN(vkAcquireNextImageKHR);
+    VULKAN_FN(vkQueuePresentKHR);
+    #endif
+  };
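+  
+  // Usage sketch (assumes a valid VkInstance/VkDevice pair):
+  //   Rc<DeviceFn> vkd = new DeviceFn(instance, device);
+  //   vkd->vkDeviceWaitIdle(vkd->device());
+  // Each VULKAN_FN member resolves its pointer once through
+  // vkGetDeviceProcAddr when the struct is constructed.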
+  
+}
\ No newline at end of file
diff --git a/src/dxvk/vulkan/dxvk_vulkan_loader_fn.h b/src/dxvk/vulkan/dxvk_vulkan_loader_fn.h
new file mode 100644
index 000000000..105d0d9f2
--- /dev/null
+++ b/src/dxvk/vulkan/dxvk_vulkan_loader_fn.h
@@ -0,0 +1,48 @@
+#pragma once
+
+#define VK_USE_PLATFORM_WIN32_KHR 1
+#include <vulkan/vulkan.h>
+
+#define VULKAN_FN(name) \
+  VulkanFn<::PFN_ ## name> name = sym(#name)
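+
+// As an illustration, VULKAN_FN(vkCreateDevice) expands to
+//   VulkanFn<::PFN_vkCreateDevice> vkCreateDevice = sym("vkCreateDevice")
+// so each listed entry point is resolved through the enclosing loader's
+// sym() implementation when the function table object is constructed.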
+
+namespace dxvk::vk {
+  
+  template<typename Fn>
+  class VulkanFn;
+  
+  /**
+   * \brief Vulkan function
+   * 
+   * Wraps a Vulkan function pointer and provides a
+   * call operator with the correct parameter and
+   * return types.
+   */
+  template<typename Ret, typename... Args>
+  class VulkanFn<Ret (VKAPI_PTR*)(Args...)> {
+    using Fn = Ret (VKAPI_PTR*)(Args...);
+  public:
+    
+    VulkanFn() { }
+    VulkanFn(Fn ptr)
+    : m_fn(ptr) { }
+    
+    VulkanFn(PFN_vkVoidFunction ptr)
+    : m_fn(reinterpret_cast<Fn>(ptr)) { }
+    
+    /**
+     * \brief Invokes Vulkan function
+     * 
+     * \param [in] args Arguments
+     * \returns Function return value
+     */
+    Ret operator () (Args... args) const {
+      return (*m_fn)(args...);
+    }
+    
+  private:
+    
+    Fn m_fn = nullptr;
+    
+  };
+  
+}
diff --git a/src/meson.build b/src/meson.build
new file mode 100644
index 000000000..185ea80b7
--- /dev/null
+++ b/src/meson.build
@@ -0,0 +1,2 @@
+subdir('util')
+subdir('dxvk')
\ No newline at end of file
diff --git a/src/util/log/log.cpp b/src/util/log/log.cpp
new file mode 100644
index 000000000..537784fd4
--- /dev/null
+++ b/src/util/log/log.cpp
@@ -0,0 +1,26 @@
+#include "log.h"
+
+namespace dxvk {
+  
+  Log::Log(const std::string& filename)
+  : m_stream(filename, std::ios_base::out | std::ios_base::trunc) {
+    
+  }
+  
+  
+  Log::~Log() {
+    
+  }
+  
+  
+  void Log::log(const std::string& message) {
+    std::lock_guard<std::mutex> lock(m_mutex);
+    
+    std::cerr << message << std::endl;
+    std::cerr.flush();
+    
+    m_stream  << message << std::endl;
+    m_stream.flush();
+  }
+  
+}
\ No newline at end of file
diff --git a/src/util/log/log.h b/src/util/log/log.h
new file mode 100644
index 000000000..7d3f454e1
--- /dev/null
+++ b/src/util/log/log.h
@@ -0,0 +1,40 @@
+#pragma once
+
+#include <fstream>
+#include <iostream>
+#include <mutex>
+#include <string>
+
+#include "../rc/util_rc.h"
+
+namespace dxvk {
+  
+  /**
+   * \brief Logger
+   * 
+   * Logs messages generated by DXVK and
+   * the client APIs using DXVK.
+   */
+  class Log : public RcObject {
+    
+  public:
+    
+    Log(const std::string& filename);
+    ~Log();
+    
+    /**
+     * \brief Adds a message to the log
+     * 
+     * Prints the message to stderr and appends
+     * it to the log file at the same time.
+     */
+    void log(const std::string& message);
+    
+  private:
+    
+    std::mutex    m_mutex;
+    std::fstream  m_stream;
+    
+  };
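+  
+  // Usage sketch (file name chosen arbitrarily here):
+  //   Rc<Log> log = new Log("dxvk.log");
+  //   log->log(str::format("Found ", adapterCount, " adapters"));
+  // Messages go to both stderr and the file; the internal mutex makes a
+  // shared Log instance safe to use from multiple threads.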
+  
+}
\ No newline at end of file
diff --git a/src/util/meson.build b/src/util/meson.build
new file mode 100644
index 000000000..0063094f8
--- /dev/null
+++ b/src/util/meson.build
@@ -0,0 +1,6 @@
+util_src = files([
+  'log/log.cpp',
+])
+
+util_lib = static_library('util', util_src,
+  include_directories : [ dxvk_include_path ])
diff --git a/src/util/rc/util_rc.h b/src/util/rc/util_rc.h
new file mode 100644
index 000000000..c33d0c614
--- /dev/null
+++ b/src/util/rc/util_rc.h
@@ -0,0 +1,36 @@
+#pragma once
+
+#include <atomic>
+
+namespace dxvk {
+  
+  /**
+   * \brief Reference-counted object
+   */
+  class RcObject {
+    
+  public:
+    
+    /**
+     * \brief Increments reference count
+     * \returns New reference count
+     */
+    uint32_t incRef() {
+      return ++m_refCount;
+    }
+    
+    /**
+     * \brief Decrements reference count
+     * \returns New reference count
+     */
+    uint32_t decRef() {
+      return --m_refCount;
+    }
+    
+  private:
+    
+    std::atomic<uint32_t> m_refCount = { 0u };
+    
+  };
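+  
+  // Classes managed through Rc<T> derive from RcObject. The count starts
+  // at zero; the first Rc that takes ownership brings it to one, and the
+  // object is deleted once the count drops back to zero (see util_rc_ptr.h).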
+  
+}
\ No newline at end of file
diff --git a/src/util/rc/util_rc_ptr.h b/src/util/rc/util_rc_ptr.h
new file mode 100644
index 000000000..e19e78a86
--- /dev/null
+++ b/src/util/rc/util_rc_ptr.h
@@ -0,0 +1,126 @@
+#pragma once
+
+#include <functional>
+
+namespace dxvk {
+  
+  /**
+   * \brief Pointer for reference-counted objects
+   * 
+   * This only requires the given type to implement \c incRef
+   * and \c decRef methods that adjust the reference count.
+   * \tparam T Object type
+   */
+  template<typename T>
+  class Rc {
+    template<typename Tx>
+    friend class Rc;
+  public:
+    
+    Rc() { }
+    Rc(std::nullptr_t) { }
+    
+    Rc(T* object)
+    : m_object(object) {
+      this->incRef();
+    }
+    
+    Rc(const Rc& other)
+    : m_object(other.m_object) {
+      this->incRef();
+    }
+    
+    template<typename Tx>
+    Rc(const Rc<Tx>& other)
+    : m_object(other.m_object) {
+      this->incRef();
+    }
+    
+    Rc(Rc&& other)
+    : m_object(other.m_object) {
+      other.m_object = nullptr;
+    }
+    
+    template<typename Tx>
+    Rc(Rc<Tx>&& other)
+    : m_object(other.m_object) {
+      other.m_object = nullptr;
+    }
+    
+    Rc& operator = (std::nullptr_t) {
+      this->decRef();
+      m_object = nullptr;
+      return *this;
+    }
+    
+    Rc& operator = (const Rc& other) {
+      other.incRef();
+      this->decRef();
+      m_object = other.m_object;
+      return *this;
+    }
+    
+    template<typename Tx>
+    Rc& operator = (const Rc<Tx>& other) {
+      other.incRef();
+      this->decRef();
+      m_object = other.m_object;
+      return *this;
+    }
+    
+    Rc& operator = (Rc&& other) {
+      this->decRef();
+      this->m_object = other.m_object;
+      other.m_object = nullptr;
+      return *this;
+    }
+    
+    template<typename Tx>
+    Rc& operator = (Rc<Tx>&& other) {
+      this->decRef();
+      this->m_object = other.m_object;
+      other.m_object = nullptr;
+      return *this;
+    }
+    
+    ~Rc() {
+      this->decRef();
+    }
+    
+    T& operator *  () const { return *m_object; }
+    T* operator -> () const { return  m_object; }
+    T* ptr() const { return m_object; }
+    
+    bool operator == (const Rc& other) const { return m_object == other.m_object; }
+    bool operator != (const Rc& other) const { return m_object != other.m_object; }
+    
+    bool operator == (std::nullptr_t) const { return m_object == nullptr; }
+    bool operator != (std::nullptr_t) const { return m_object != nullptr; }
+    
+  private:
+    
+    T* m_object = nullptr;
+    
+    void incRef() const {
+      if (m_object != nullptr)
+        m_object->incRef();
+    }
+    
+    void decRef() const {
+      if (m_object != nullptr) {
+        if (m_object->decRef() == 0)
+          delete m_object;
+      }
+    }
+    
+  };
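+  
+  // Reference counting sketch (Widget standing in for any RcObject subclass):
+  //   Rc<Widget> a = new Widget();  // count: 1
+  //   Rc<Widget> b = a;             // count: 2
+  //   a = nullptr;                  // count: 1
+  //   b = nullptr;                  // count: 0, object is deleted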
+  
+  
+  template<typename T>
+  struct RcHash {
+    size_t operator () (const Rc<T>& rc) const {
+      return std::hash<T*>()(rc.ptr());
+    }
+  };
+  
+}
\ No newline at end of file
diff --git a/src/util/util_error.h b/src/util/util_error.h
new file mode 100644
index 000000000..2cfd45ffd
--- /dev/null
+++ b/src/util/util_error.h
@@ -0,0 +1,31 @@
+#pragma once
+
+#include <string>
+
+namespace dxvk {
+  
+  /**
+   * \brief DXVK error
+   * 
+   * A generic exception class that stores a human-readable
+   * message. Errors of this type are expected to be caught
+   * and logged by the caller.
+   */
+  class DxvkError {
+    
+  public:
+    
+    DxvkError() { }
+    DxvkError(std::string&& message)
+    : m_message(std::move(message)) { }
+    
+    const std::string& message() const {
+      return m_message;
+    }
+    
+  private:
+    
+    std::string m_message;
+    
+  };
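+  
+  // Usage sketch (MyClass::init is a placeholder name): throw with a
+  // formatted message and let the caller catch and log it, as the Vulkan
+  // wrappers and the triangle test do:
+  //   if (status != VK_SUCCESS)
+  //     throw DxvkError(str::format("MyClass::init: Call failed: ", status));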
+  
+}
\ No newline at end of file
diff --git a/src/util/util_math.h b/src/util/util_math.h
new file mode 100644
index 000000000..c0b155fa1
--- /dev/null
+++ b/src/util/util_math.h
@@ -0,0 +1,12 @@
+#pragma once
+
+namespace dxvk {
+  
+  template<typename T>
+  T clamp(T n, T lo, T hi) {
+    if (n < lo) return lo;
+    if (n > hi) return hi;
+    return n;
+  }
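+  
+  // e.g. clamp(5, 0, 3) == 3 and clamp(-1.0f, 0.0f, 1.0f) == 0.0f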
+  
+}
\ No newline at end of file
diff --git a/src/util/util_string.h b/src/util/util_string.h
new file mode 100644
index 000000000..366c393be
--- /dev/null
+++ b/src/util/util_string.h
@@ -0,0 +1,23 @@
+#pragma once
+
+#include <string>
+#include <sstream>
+
+namespace dxvk::str {
+  
+  inline void format1(std::stringstream&) { }
+  
+  template<typename T, typename... Tx>
+  void format1(std::stringstream& str, const T& arg, const Tx&... args) {
+    str << arg;
+    format1(str, args...);
+  }
+  
+  template<typename... Args>
+  std::string format(const Args&... args) {
+    std::stringstream stream;
+    format1(stream, args...);
+    return stream.str();
+  }
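+  
+  // Example: str::format("Found ", 2, " devices") yields "Found 2 devices";
+  // any argument with a std::ostream operator << overload can be passed.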
+  
+}
diff --git a/tests/dxvk/meson.build b/tests/dxvk/meson.build
new file mode 100644
index 000000000..91b61d1b5
--- /dev/null
+++ b/tests/dxvk/meson.build
@@ -0,0 +1,3 @@
+test_dxvk_deps = [ dxvk_dep ]
+
+executable('dxvk-triangle', files('test_dxvk_triangle.cpp'), dependencies: test_dxvk_deps)
\ No newline at end of file
diff --git a/tests/dxvk/test_dxvk_triangle.cpp b/tests/dxvk/test_dxvk_triangle.cpp
new file mode 100644
index 000000000..3ca060c5b
--- /dev/null
+++ b/tests/dxvk/test_dxvk_triangle.cpp
@@ -0,0 +1,128 @@
+#include <dxvk_framebuffer.h>
+#include <dxvk_instance.h>
+#include <dxvk_main.h>
+#include <dxvk_surface.h>
+
+#include <windows.h>
+#include <windowsx.h>
+
+using namespace dxvk;
+
+class TriangleApp {
+  
+public:
+  
+  TriangleApp(HINSTANCE instance, HWND window)
+  : m_dxvkInstance    (new DxvkInstance()),
+    m_dxvkAdapter     (m_dxvkInstance->enumAdapters().at(0)),
+    m_dxvkDevice      (m_dxvkAdapter->createDevice()),
+    m_dxvkSurface     (m_dxvkAdapter->createSurface(instance, window)),
+    m_dxvkSwapchain   (m_dxvkDevice->createSwapchain(m_dxvkSurface,
+      DxvkSwapchainProperties {
+        VkSurfaceFormatKHR { VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR },
+        VK_PRESENT_MODE_FIFO_KHR,
+        VkExtent2D { 640, 480 },
+      })),
+    m_dxvkContext     (m_dxvkDevice->createContext()),
+    m_dxvkCommandList (m_dxvkDevice->createCommandList()) {
+    
+  }
+  
+  ~TriangleApp() {
+    
+  }
+  
+  void run() {
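+    // One frame in outline: sync1 ties the swapchain's framebuffer
+    // acquisition to the command list submission, sync2 ties the
+    // submission to the present call, and waitForIdle keeps this
+    // sample trivially synchronized at the cost of throughput.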
+    auto sync1 = m_dxvkDevice->createSemaphore();
+    auto sync2 = m_dxvkDevice->createSemaphore();
+    
+    m_dxvkContext->beginRecording(m_dxvkCommandList);
+    m_dxvkContext->setFramebuffer(
+      m_dxvkSwapchain->getFramebuffer(sync1));
+    m_dxvkContext->endRecording();
+    
+    auto fence = m_dxvkDevice->submitCommandList(
+      m_dxvkCommandList, sync1, sync2);
+    m_dxvkSwapchain->present(sync2);
+    m_dxvkDevice->waitForIdle();
+    m_dxvkCommandList->reset();
+  }
+  
+private:
+  
+  Rc<DxvkInstance>    m_dxvkInstance;
+  Rc<DxvkAdapter>     m_dxvkAdapter;
+  Rc<DxvkDevice>      m_dxvkDevice;
+  Rc<DxvkSurface>     m_dxvkSurface;
+  Rc<DxvkSwapchain>   m_dxvkSwapchain;
+  Rc<DxvkContext>     m_dxvkContext;
+  Rc<DxvkCommandList> m_dxvkCommandList;
+  
+  Rc<DxvkBuffer>      m_vertexBuffer;
+  
+};
+
+LRESULT CALLBACK WindowProc(HWND hWnd,
+                            UINT message,
+                            WPARAM wParam,
+                            LPARAM lParam);
+
+int WINAPI WinMain(HINSTANCE hInstance,
+                   HINSTANCE hPrevInstance,
+                   LPSTR lpCmdLine,
+                   int nCmdShow) {
+  HWND hWnd;
+  WNDCLASSEXW wc;
+  ZeroMemory(&wc, sizeof(WNDCLASSEX));
+  wc.cbSize = sizeof(WNDCLASSEX);
+  wc.style = CS_HREDRAW | CS_VREDRAW;
+  wc.lpfnWndProc = WindowProc;
+  wc.hInstance = hInstance;
+  wc.hCursor = LoadCursor(nullptr, IDC_ARROW);
+  wc.hbrBackground = (HBRUSH)(COLOR_WINDOW + 1);
+  wc.lpszClassName = L"WindowClass1";
+  RegisterClassExW(&wc);
+
+  hWnd = CreateWindowExW(0,
+    L"WindowClass1",
+    L"Our First Windowed Program",
+    WS_OVERLAPPEDWINDOW,
+    300, 300,
+    640, 480,
+    nullptr,
+    nullptr,
+    hInstance,
+    nullptr);
+  ShowWindow(hWnd, nCmdShow);
+
+  MSG msg = { };
+  
+  try {
+    // Device and swapchain creation may throw DxvkError as well
+    TriangleApp app(hInstance, hWnd);
+    
+    while (true) {
+      if (PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE)) {
+        TranslateMessage(&msg);
+        DispatchMessage(&msg);
+        
+        if (msg.message == WM_QUIT)
+          return msg.wParam;
+      } else {
+        app.run();
+      }
+    }
+  } catch (const dxvk::DxvkError& e) {
+    dxvk::log(e.message());
+    return 1;
+  }
+}
+
+LRESULT CALLBACK WindowProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) {
+  switch (message) {
+    case WM_CLOSE:
+      PostQuitMessage(0);
+      return 0;
+  }
+
+  return DefWindowProc(hWnd, message, wParam, lParam);
+}
diff --git a/tests/meson.build b/tests/meson.build
new file mode 100644
index 000000000..22ba44794
--- /dev/null
+++ b/tests/meson.build
@@ -0,0 +1 @@
+subdir('dxvk')
\ No newline at end of file